Initial Commit
This commit is contained in:
parent
de861d4815
commit
2be3d00ac4
3
backend_flask/.flaskenv
Normal file
3
backend_flask/.flaskenv
Normal file
@ -0,0 +1,3 @@
|
||||
FLASK_APP="myapp:create_app()"
|
||||
FLASK_ENV="development"
|
||||
PYTHONPATH=.
|
||||
0
backend_flask/__init__.py
Normal file
0
backend_flask/__init__.py
Normal file
0
backend_flask/celery_worker/__init__.py
Normal file
0
backend_flask/celery_worker/__init__.py
Normal file
329
backend_flask/celery_worker/celery_app.py
Normal file
329
backend_flask/celery_worker/celery_app.py
Normal file
@ -0,0 +1,329 @@
|
||||
import datetime
|
||||
import re
|
||||
import string
|
||||
from collections import Counter, defaultdict
|
||||
|
||||
import requests
|
||||
from requests.exceptions import Timeout, RequestException, ConnectionError, HTTPError
|
||||
from bs4 import BeautifulSoup
|
||||
from bson.objectid import ObjectId, InvalidId
|
||||
from celery import Celery
|
||||
|
||||
import pymongo
|
||||
from ..myapp.config import Config
|
||||
|
||||
# --- Database Connection Setup for Celery ---
|
||||
mongo_client = None
|
||||
mongo_db = None
|
||||
|
||||
|
||||
def get_db():
    """
    Helper to get a dedicated DB connection for Celery tasks.

    Lazily creates a pymongo client on first use and caches both the client
    and the database handle in module globals, so subsequent task invocations
    in the same worker process reuse one connection.

    Raises:
        RuntimeError: if the MongoDB server cannot be reached.
    """
    global mongo_client, mongo_db
    # Fast path: connection already established by an earlier task.
    if mongo_db is not None:
        return mongo_db
    try:
        print("Task: Initializing new MongoDB client connection...")
        mongo_client = pymongo.MongoClient(Config.MONGO_URI, serverSelectionTimeoutMS=5000)
        # Cheap round-trip to force server selection and fail fast if down.
        mongo_client.admin.command('ismaster')
        mongo_db = mongo_client.get_database()
        print(f"Task: MongoDB connection successful. Using database: {mongo_db.name}")
    except Exception as e:
        print(f"FATAL Task Error: Could not connect to MongoDB: {e}")
        mongo_db = None
        raise RuntimeError(f"Database connection failed in Celery task: {e}")
    return mongo_db
|
||||
|
||||
|
||||
# --- Celery configuration ---
# Broker/result backend come from the shared Flask app Config.
celery = Celery("my_celery_app")
celery.conf.broker_url = Config.CELERY_BROKER_URL
celery.conf.result_backend = Config.CELERY_RESULT_BACKEND

# Per-request timeout (seconds) for all outbound HTTP fetches in tasks.
REQUEST_TIMEOUT = 15

# --- Text Cleaning Setup ---
# Matches one or more consecutive punctuation characters.
# NOTE(review): currently unused — clean_and_tokenize() does not apply it; confirm intent.
PUNCT_REGEX = re.compile(rf"[{re.escape(string.punctuation)}]+")
# Common English stopwords.
# NOTE(review): currently unused — clean_and_tokenize() does not filter against it; confirm intent.
STOPWORDS = {
    "the", "is", "a", "an", "of", "to", "and", "for", "on", "in", "that", "it", "with", "as", "at", "be",
    "this", "are", "was", "were", "will", "would", "or", "so", "if", "then", "from", "not", "by", "we", "you",
    "i", "he", "she", "they", "have", "has", "had", "but", "too", "some", "any", "my", "your", "their", "me"
}
|
||||
|
||||
|
||||
def clean_and_tokenize(text):
    """
    Performs minimal cleaning by splitting the text.
    For English this works reasonably; for Chinese you may use a segmentation library like jieba.

    Returns an empty list for empty/None input.
    """
    # Whitespace split only — no punctuation stripping or stopword removal here.
    return text.split() if text else []
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Task: Asynchronous Title and Keyword Extraction
|
||||
# --------------------------
|
||||
# --------------------------
# Task: Asynchronous Title and Keyword Extraction
# --------------------------
@celery.task(bind=True, max_retries=1, default_retry_delay=10)
def async_extract_title_and_keywords(self, url_id_str, user_id_str):
    """
    Fetches the webpage, extracts the title and computes the top 20 keywords from its body text.
    Updates the URL document with the new title and keywords.

    Args:
        url_id_str: String form of the URL document's ObjectId.
        user_id_str: String form of the requesting user's ObjectId (not used in this task).

    Returns:
        A short status string describing the outcome (stored as the Celery result).
    """
    print(f"Task: Starting title/keyword extraction for URL ID: {url_id_str}")
    try:
        db = get_db()
        url_obj_id = ObjectId(url_id_str)
    except InvalidId:
        print(f"Task Error: Invalid URL ID format: {url_id_str}")
        return "Invalid URL ID format."
    except Exception as e:
        print(f"Task Error: Could not initialize DB or ObjectId: {e}")
        return f"DB/ObjectId Error: {e}"

    if db is None:
        print(f"Task Error: DB connection is None for URL ID: {url_id_str}")
        return "DB connection error."

    try:
        url_doc = db.urls.find_one({"_id": url_obj_id})
        if not url_doc:
            print(f"Task Error: URL doc not found for ID: {url_id_str}")
            return "URL doc not found."

        page_url = url_doc.get("url", "")
        if not page_url:
            print(f"Task Error: No URL found in doc: {url_id_str}")
            db.urls.update_one({"_id": url_obj_id},
                               {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
            return "No URL found in doc."

        page_title = ""
        keywords_list = []
        status_to_set = "failed"  # flipped to "completed" only when parsing succeeds

        try:
            print(f"Task: Fetching URL: {page_url} with timeout={REQUEST_TIMEOUT}")
            headers = {'User-Agent': 'Mozilla/5.0 (compatible; SurfSmartBot/1.0; +http://example.com/bot)'}
            r = requests.get(page_url, timeout=REQUEST_TIMEOUT, headers=headers, allow_redirects=True)
            r.raise_for_status()
            soup = BeautifulSoup(r.text, "html.parser")
            # BUGFIX: soup.title can exist with a None .string (e.g. nested markup
            # inside <title>); calling .strip() then raised AttributeError and the
            # whole extraction was spuriously marked failed.
            title_tag = soup.title
            if title_tag is not None and title_tag.string:
                page_title = title_tag.string.strip()
            else:
                page_title = url_doc.get("title", "")
            body_text = soup.body.get_text(" ", strip=True) if soup.body else ""
            tokens = clean_and_tokenize(body_text)
            if tokens:
                counter = Counter(tokens)
                top_20 = counter.most_common(20)
                # Percentages are relative to the top-20 total, not the whole page.
                total_count = sum(count for _, count in top_20)
                for word, count in top_20:
                    perc = round((count / total_count) * 100, 2) if total_count > 0 else 0
                    keywords_list.append({"word": word, "percentage": perc})
            status_to_set = "completed"
            print(
                f"Task: Extraction completed for URL {url_id_str}. Title: '{page_title}', Keywords count: {len(keywords_list)}")
        except Timeout:
            print(f"Task Error: Request timed out for URL: {page_url}")
        except ConnectionError:
            print(f"Task Error: Connection error for URL: {page_url}")
        except HTTPError as http_err:
            print(f"Task Error: HTTP error occurred: {http_err} for URL: {page_url}")
        except RequestException as req_err:
            print(f"Task Error: Request exception for URL {page_url}: {req_err}")
        except Exception as e:
            # Unknown failure: retry once, then fall through and record "failed".
            print(f"Task Error: Unexpected error processing URL {page_url}: {e}")
            try:
                self.retry(exc=e)
            except Exception as retry_err:
                print(f"Task Error: Retry failed for URL {url_id_str}: {retry_err}")

        # Always record the final status; title/keywords only on success.
        update_data = {
            "processingStatus": status_to_set,
            "updatedAt": datetime.datetime.utcnow()
        }
        if status_to_set == "completed":
            update_data["title"] = page_title
            update_data["keywords"] = keywords_list

        db.urls.update_one({"_id": url_obj_id}, {"$set": update_data})
        print(f"Task: DB updated for URL {url_id_str} with extraction status '{status_to_set}'")
        return f"OK: Extraction task completed with status {status_to_set}"
    except Exception as e:
        print(f"Task Error: Failed during extraction for URL {url_id_str}: {e}")
        try:
            db.urls.update_one({"_id": url_obj_id},
                               {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
        except Exception:
            pass
        return f"Error: Extraction task failed for URL {url_id_str}"
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Task: Asynchronous Summarization
|
||||
# --------------------------
|
||||
# --------------------------
# Task: Asynchronous Summarization
# --------------------------
@celery.task(bind=True, max_retries=1, default_retry_delay=10)
def async_summarize_url(self, url_id_str, user_id_str, use_gemini):
    """
    Fetches webpage content and extracts up to the first 1000 words.
    If use_gemini is True and a valid Gemini API key is present, builds an effective prompt
    and calls Gemini to generate a ~300-word summary (under 350 words). Otherwise, truncates the text
    to around 300 words.
    Updates the URL document's 'summary' and 'processingStatus' accordingly.

    Args:
        url_id_str: String form of the URL document's ObjectId.
        user_id_str: String form of the user's ObjectId, used to look up their Gemini API key.
        use_gemini: Whether to attempt LLM summarization before falling back to truncation.

    Returns:
        A short status string describing the outcome (stored as the Celery result).
    """
    print(f"Task: Starting summary generation for URL ID: {url_id_str}")
    try:
        db = get_db()
        url_obj_id = ObjectId(url_id_str)
    except InvalidId:
        print(f"Task Error: Invalid URL ID format: {url_id_str}")
        return "Invalid URL ID format."
    except Exception as e:
        print(f"Task Error: Could not initialize DB or ObjectId: {e}")
        return f"DB/ObjectId Error: {e}"

    if db is None:
        print(f"Task Error: DB connection is None for URL ID: {url_id_str}")
        return "DB connection error."

    # BUGFIX: page_url is interpolated by the outer `except Exception` handler
    # below; if find_one() raised before the assignment inside the try block,
    # that handler itself crashed with NameError, masking the real error.
    page_url = ""

    try:
        url_doc = db.urls.find_one({"_id": url_obj_id})
        if not url_doc:
            print(f"Task Error: URL doc not found for ID: {url_id_str}")
            return "URL doc not found."

        page_url = url_doc.get("url", "")
        if not page_url:
            print(f"Task Error: No URL found in doc: {url_id_str}")
            db.urls.update_one({"_id": url_obj_id},
                               {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
            return "No URL found in doc."

        headers = {'User-Agent': 'Mozilla/5.0 (compatible; SurfSmartBot/1.0; +http://example.com/bot)'}
        r = requests.get(page_url, timeout=REQUEST_TIMEOUT, headers=headers, allow_redirects=True)
        r.raise_for_status()
        soup = BeautifulSoup(r.text, "html.parser")
        body_text = soup.body.get_text(" ", strip=True) if soup.body else ""
        words_full = body_text.split()  # For better language support, integrate a segmentation tool if needed.
        # Cap the LLM input / truncation source at the first 1000 words.
        text_1000 = " ".join(words_full[:1000])

        summary_result = ""
        if use_gemini:
            # The user's selected Gemini key lives in the api_list collection.
            api_doc = db.api_list.find_one({"uid": ObjectId(user_id_str), "selected": True, "name": "Gemini"})
            if api_doc and api_doc.get("key"):
                gemini_key = api_doc.get("key")
                prompt = (
                    "You are an expert summarizer. Below is text extracted from a webpage. "
                    "Please generate a concise, high-quality summary of approximately 300 words (but under 350 words). "
                    "Ensure the summary is in the same language as the input text.\n\n" +
                    text_1000
                )
                try:
                    # Imported lazily so workers without the Gemini SDK can still
                    # run the truncation path.
                    import google.generativeai as genai
                    from google.api_core import exceptions as google_exceptions
                    genai.configure(api_key=gemini_key)
                    GEMINI_MODEL_NAME = 'gemini-1.5-pro-latest'
                    model = genai.GenerativeModel(GEMINI_MODEL_NAME)
                    gemini_input = [{"role": "user", "parts": [{"text": prompt}]}]
                    llm_response = model.generate_content(gemini_input)
                    # A response with no parts (e.g. safety-blocked) yields "".
                    summary_result = llm_response.text if llm_response.parts else ""
                except Exception as gem_err:
                    print(f"Task Error: Gemini API error: {gem_err}. Falling back to truncation.")
                    summary_result = " ".join(text_1000.split()[:300])
            else:
                # No usable key on file — plain truncation.
                summary_result = " ".join(text_1000.split()[:300])
        else:
            summary_result = " ".join(text_1000.split()[:300])

        # An empty/whitespace summary (e.g. empty page or blocked LLM reply)
        # counts as a failure.
        status_to_set = "completed" if summary_result.strip() else "failed"
        update_data = {
            "summary": summary_result,
            "processingStatus": status_to_set,
            "updatedAt": datetime.datetime.utcnow()
        }
        db.urls.update_one({"_id": url_obj_id}, {"$set": update_data})
        print(
            f"Task: Summary generation for URL {url_id_str} completed with status '{status_to_set}'. Word count: {len(summary_result.split())}")
        return f"OK: Summary task completed with status {status_to_set}"
    except Timeout:
        print(f"Task Error: Request timed out for URL: {page_url}")
    except ConnectionError:
        print(f"Task Error: Connection error for URL: {page_url}")
    except HTTPError as http_err:
        print(f"Task Error: HTTP error occurred: {http_err} for URL: {page_url}")
    except RequestException as req_err:
        print(f"Task Error: Request exception for URL {page_url}: {req_err}")
    except Exception as e:
        # Unknown failure: retry once before giving up.
        print(f"Task Error: Unexpected error during summarization for URL {page_url}: {e}")
        try:
            self.retry(exc=e)
        except Exception as retry_err:
            print(f"Task Error: Retry failed for URL {url_id_str}: {retry_err}")
    # Reached only via the except branches above: best-effort mark as failed.
    try:
        db.urls.update_one({"_id": url_obj_id},
                           {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
    except Exception:
        pass
    return f"Error: Summarization task failed for URL {url_id_str}"
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Task: Asynchronous Recalculate Project Keywords
|
||||
# --------------------------
|
||||
# --------------------------
# Task: Asynchronous Recalculate Project Keywords
# --------------------------
@celery.task(bind=True, max_retries=1, default_retry_delay=10)
def async_recalc_project_keywords(self, project_id, user_id_str):
    """
    Recalculates project keywords by summing the percentages from all associated URL documents.
    Retains the top 20 keywords and updates the project document.

    Args:
        project_id: String form of the project's ObjectId.
        user_id_str: String form of the requesting user's ObjectId (not used in this task).

    Returns:
        A short status string describing the outcome (stored as the Celery result).
    """
    print(f"Task: Starting keywords recalculation for project {project_id}")
    try:
        db = get_db()
        project_obj_id = ObjectId(project_id)
    except InvalidId:
        print(f"Task Error: Invalid project ID format: {project_id}")
        return "Invalid project ID format."
    except Exception as e:
        print(f"Task Error: Unable to initialize DB or convert project ID: {e}")
        return f"DB/ObjectId Error: {e}"

    if db is None:
        print(f"Task Error: DB connection is None for project {project_id}")
        return "DB connection error."

    try:
        # Only the keywords field is needed; projection keeps the cursor light.
        cursor = db.urls.find({"projectId": project_obj_id}, {"keywords": 1})
        combined = defaultdict(float)
        for doc in cursor:
            keywords_list = doc.get("keywords", [])
            if isinstance(keywords_list, list):
                for kw in keywords_list:
                    if isinstance(kw, dict):
                        # BUGFIX: the original called .strip() before checking the
                        # type, so a non-string "word" (e.g. a number) raised
                        # AttributeError and aborted the whole recalculation.
                        raw_word = kw.get("word", "")
                        word = raw_word.strip() if isinstance(raw_word, str) else ""
                        try:
                            percentage = float(kw.get("percentage", 0.0))
                        except (ValueError, TypeError):
                            percentage = 0.0
                        if word:
                            combined[word] += percentage
                    else:
                        print(f"Task Warning: Non-dict item in keywords for a URL in project {project_id}")
            else:
                print(f"Task Warning: Keywords field is not a list for a URL in project {project_id}")

        # Keep the 20 highest aggregate percentages across all URLs.
        sorted_kw = sorted(combined.items(), key=lambda x: x[1], reverse=True)[:20]
        top_keywords = [{"word": w, "percentage": round(val, 2)} for w, val in sorted_kw]

        update_data = {
            "keywords": top_keywords,
            "updatedAt": datetime.datetime.utcnow()
        }
        db.projects.update_one({"_id": project_obj_id}, {"$set": update_data})
        print(f"Task: Keywords recalculation for project {project_id} completed. Top keywords: {top_keywords}")
        return f"OK: Project keywords recalculated successfully."
    except Exception as e:
        print(f"Task Error: Failed during keywords recalculation for project {project_id}: {e}")
        try:
            # Best-effort timestamp bump so the failure is at least visible.
            db.projects.update_one({"_id": project_obj_id}, {"$set": {"updatedAt": datetime.datetime.utcnow()}})
        except Exception:
            pass
        return f"Error: Keywords recalculation failed for project {project_id}"
|
||||
247
backend_flask/flask/bin/Activate.ps1
Normal file
247
backend_flask/flask/bin/Activate.ps1
Normal file
@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
70
backend_flask/flask/bin/activate
Normal file
70
backend_flask/flask/bin/activate
Normal file
@ -0,0 +1,70 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# You cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
# on Windows, a path can contain colons and backslashes and has to be converted:
|
||||
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
|
||||
# transform D:\path\to\venv to /d/path/to/venv on MSYS
|
||||
# and to /cygdrive/d/path/to/venv on Cygwin
|
||||
export VIRTUAL_ENV=$(cygpath /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask)
|
||||
else
|
||||
# use the path as-is
|
||||
export VIRTUAL_ENV=/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask
|
||||
fi
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(flask) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(flask) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# Call hash to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
hash -r 2> /dev/null
|
||||
27
backend_flask/flask/bin/activate.csh
Normal file
27
backend_flask/flask/bin/activate.csh
Normal file
@ -0,0 +1,27 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = '(flask) '"$prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT '(flask) '
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
||||
69
backend_flask/flask/bin/activate.fish
Normal file
69
backend_flask/flask/bin/activate.fish
Normal file
@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/). You cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(flask) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(flask) '
|
||||
end
|
||||
8
backend_flask/flask/bin/celery
Normal file
8
backend_flask/flask/bin/celery
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from celery.__main__ import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/dotenv
Normal file
8
backend_flask/flask/bin/dotenv
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from dotenv.__main__ import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
8
backend_flask/flask/bin/f2py
Normal file
8
backend_flask/flask/bin/f2py
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from numpy.f2py.f2py2e import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/flask
Normal file
8
backend_flask/flask/bin/flask
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from flask.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/httpx
Normal file
8
backend_flask/flask/bin/httpx
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from httpx import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
41
backend_flask/flask/bin/jsondiff
Normal file
41
backend_flask/flask/bin/jsondiff
Normal file
@ -0,0 +1,41 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
# -*- coding: utf-8 -*-
# Vendored console script from the python-json-patch package (venv bin/):
# diffs two JSON files and prints an RFC 6902 JSON Patch.

from __future__ import print_function

import sys
import json
import jsonpatch
import argparse


parser = argparse.ArgumentParser(description='Diff two JSON files')
parser.add_argument('FILE1', type=argparse.FileType('r'))
parser.add_argument('FILE2', type=argparse.FileType('r'))
parser.add_argument('--indent', type=int, default=None,
                    help='Indent output by n spaces')
parser.add_argument('-u', '--preserve-unicode', action='store_true',
                    help='Output Unicode character as-is without using Code Point')
parser.add_argument('-v', '--version', action='version',
                    version='%(prog)s ' + jsonpatch.__version__)


def main():
    """Entry point: run the diff; exit with status 1 on Ctrl-C."""
    try:
        diff_files()
    except KeyboardInterrupt:
        sys.exit(1)


def diff_files():
    """ Diffs two JSON files and prints a patch """
    args = parser.parse_args()
    doc1 = json.load(args.FILE1)
    doc2 = json.load(args.FILE2)
    patch = jsonpatch.make_patch(doc1, doc2)
    if patch.patch:
        print(json.dumps(patch.patch, indent=args.indent, ensure_ascii=not(args.preserve_unicode)))
        # A non-empty diff exits 1, mirroring diff(1) semantics.
        sys.exit(1)


if __name__ == "__main__":
    main()
|
||||
107
backend_flask/flask/bin/jsonpatch
Normal file
107
backend_flask/flask/bin/jsonpatch
Normal file
@ -0,0 +1,107 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
# -*- coding: utf-8 -*-
# Vendored console script from the python-json-patch package (venv bin/):
# applies an RFC 6902 JSON Patch to a JSON document, writing the result to
# stdout or rewriting the original file in place (atomically when possible).

import sys
import os.path
import json
import jsonpatch
import tempfile
import argparse


parser = argparse.ArgumentParser(
    description='Apply a JSON patch on a JSON file')
parser.add_argument('ORIGINAL', type=argparse.FileType('r'),
                    help='Original file')
parser.add_argument('PATCH', type=argparse.FileType('r'),
                    nargs='?', default=sys.stdin,
                    help='Patch file (read from stdin if omitted)')
parser.add_argument('--indent', type=int, default=None,
                    help='Indent output by n spaces')
parser.add_argument('-b', '--backup', action='store_true',
                    help='Back up ORIGINAL if modifying in-place')
parser.add_argument('-i', '--in-place', action='store_true',
                    help='Modify ORIGINAL in-place instead of to stdout')
parser.add_argument('-v', '--version', action='version',
                    version='%(prog)s ' + jsonpatch.__version__)
parser.add_argument('-u', '--preserve-unicode', action='store_true',
                    help='Output Unicode character as-is without using Code Point')

def main():
    """Entry point: apply the patch; exit with status 1 on Ctrl-C."""
    try:
        patch_files()
    except KeyboardInterrupt:
        sys.exit(1)


def patch_files():
    """Apply PATCH to ORIGINAL and emit the result (stdout or in-place)."""
    args = parser.parse_args()
    doc = json.load(args.ORIGINAL)
    patch = json.load(args.PATCH)
    result = jsonpatch.apply_patch(doc, patch)

    if args.in_place:
        dirname = os.path.abspath(os.path.dirname(args.ORIGINAL.name))

        try:
            # Attempt to replace the file atomically. We do this by
            # creating a temporary file in the same directory as the
            # original file so we can atomically move the new file over
            # the original later. (This is done in the same directory
            # because atomic renames do not work across mount points.)

            fd, pathname = tempfile.mkstemp(dir=dirname)
            fp = os.fdopen(fd, 'w')
            atomic = True

        except OSError:
            # We failed to create the temporary file for an atomic
            # replace, so fall back to non-atomic mode by backing up
            # the original (if desired) and writing a new file.

            if args.backup:
                os.rename(args.ORIGINAL.name, args.ORIGINAL.name + '.orig')
            fp = open(args.ORIGINAL.name, 'w')
            atomic = False

    else:
        # Since we're not replacing the original file in-place, write
        # the modified JSON to stdout instead.

        fp = sys.stdout

    # By this point we have some sort of file object we can write the
    # modified JSON to.

    json.dump(result, fp, indent=args.indent, ensure_ascii=not(args.preserve_unicode))
    fp.write('\n')

    if args.in_place:
        # Close the new file. If we aren't replacing atomically, this
        # is our last step, since everything else is already in place.

        fp.close()

        if atomic:
            try:
                # Complete the atomic replace by linking the original
                # to a backup (if desired), fixing up the permissions
                # on the temporary file, and moving it into place.

                if args.backup:
                    os.link(args.ORIGINAL.name, args.ORIGINAL.name + '.orig')
                os.chmod(pathname, os.stat(args.ORIGINAL.name).st_mode)
                os.rename(pathname, args.ORIGINAL.name)

            except OSError:
                # In the event we could not actually do the atomic
                # replace, unlink the original to move it out of the
                # way and finally move the temporary file into place.

                os.unlink(args.ORIGINAL.name)
                os.rename(pathname, args.ORIGINAL.name)


if __name__ == "__main__":
    main()
|
||||
67
backend_flask/flask/bin/jsonpointer
Normal file
67
backend_flask/flask/bin/jsonpointer
Normal file
@ -0,0 +1,67 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
# -*- coding: utf-8 -*-
# Vendored console script from the python-json-pointer package (venv bin/):
# resolves an RFC 6901 JSON Pointer against one or more JSON files.


import argparse
import json
import sys

import jsonpointer

parser = argparse.ArgumentParser(
    description='Resolve a JSON pointer on JSON files')

# Accept pointer as argument or as file
ptr_group = parser.add_mutually_exclusive_group(required=True)

ptr_group.add_argument('-f', '--pointer-file', type=argparse.FileType('r'),
                       nargs='?',
                       help='File containing a JSON pointer expression')

ptr_group.add_argument('POINTER', type=str, nargs='?',
                       help='A JSON pointer expression')

parser.add_argument('FILE', type=argparse.FileType('r'), nargs='+',
                    help='Files for which the pointer should be resolved')
parser.add_argument('--indent', type=int, default=None,
                    help='Indent output by n spaces')
parser.add_argument('-v', '--version', action='version',
                    version='%(prog)s ' + jsonpointer.__version__)


def main():
    """Entry point: resolve the pointer; exit with status 1 on Ctrl-C."""
    try:
        resolve_files()
    except KeyboardInterrupt:
        sys.exit(1)


def parse_pointer(args):
    """Return the pointer expression from argv or from --pointer-file."""
    if args.POINTER:
        ptr = args.POINTER
    elif args.pointer_file:
        ptr = args.pointer_file.read().strip()
    else:
        # Neither form supplied: show usage and fail.
        parser.print_usage()
        sys.exit(1)

    return ptr


def resolve_files():
    """ Resolve a JSON pointer on JSON files """
    args = parser.parse_args()

    ptr = parse_pointer(args)

    for f in args.FILE:
        doc = json.load(f)
        try:
            result = jsonpointer.resolve_pointer(doc, ptr)
            print(json.dumps(result, indent=args.indent))
        except jsonpointer.JsonPointerException as e:
            # Report per-file resolution failures without aborting the loop.
            print('Could not resolve pointer: %s' % str(e), file=sys.stderr)


if __name__ == "__main__":
    main()
|
||||
8
backend_flask/flask/bin/langchain-server
Normal file
8
backend_flask/flask/bin/langchain-server
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from langchain.server import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/normalizer
Normal file
8
backend_flask/flask/bin/normalizer
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from charset_normalizer import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli.cli_detect())
|
||||
8
backend_flask/flask/bin/pip
Normal file
8
backend_flask/flask/bin/pip
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/pip3
Normal file
8
backend_flask/flask/bin/pip3
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/pip3.12
Normal file
8
backend_flask/flask/bin/pip3.12
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
backend_flask/flask/bin/py.test
Normal file
8
backend_flask/flask/bin/py.test
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pytest import console_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(console_main())
|
||||
8
backend_flask/flask/bin/pyrsa-decrypt
Normal file
8
backend_flask/flask/bin/pyrsa-decrypt
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import decrypt
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(decrypt())
|
||||
8
backend_flask/flask/bin/pyrsa-encrypt
Normal file
8
backend_flask/flask/bin/pyrsa-encrypt
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import encrypt
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(encrypt())
|
||||
8
backend_flask/flask/bin/pyrsa-keygen
Normal file
8
backend_flask/flask/bin/pyrsa-keygen
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import keygen
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(keygen())
|
||||
8
backend_flask/flask/bin/pyrsa-priv2pub
Normal file
8
backend_flask/flask/bin/pyrsa-priv2pub
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.util import private_to_public
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(private_to_public())
|
||||
8
backend_flask/flask/bin/pyrsa-sign
Normal file
8
backend_flask/flask/bin/pyrsa-sign
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import sign
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(sign())
|
||||
8
backend_flask/flask/bin/pyrsa-verify
Normal file
8
backend_flask/flask/bin/pyrsa-verify
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from rsa.cli import verify
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(verify())
|
||||
8
backend_flask/flask/bin/pytest
Normal file
8
backend_flask/flask/bin/pytest
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pytest import console_main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(console_main())
|
||||
1
backend_flask/flask/bin/python
Normal file
1
backend_flask/flask/bin/python
Normal file
@ -0,0 +1 @@
|
||||
python3
|
||||
1
backend_flask/flask/bin/python3
Normal file
1
backend_flask/flask/bin/python3
Normal file
@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
||||
1
backend_flask/flask/bin/python3.12
Normal file
1
backend_flask/flask/bin/python3.12
Normal file
@ -0,0 +1 @@
|
||||
python3
|
||||
8
backend_flask/flask/bin/tqdm
Normal file
8
backend_flask/flask/bin/tqdm
Normal file
@ -0,0 +1,8 @@
|
||||
#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from tqdm.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
164
backend_flask/flask/include/site/python3.12/greenlet/greenlet.h
Normal file
164
backend_flask/flask/include/site/python3.12/greenlet/greenlet.h
Normal file
@ -0,0 +1,164 @@
|
||||
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
|
||||
|
||||
/* Greenlet object interface */
|
||||
|
||||
#ifndef Py_GREENLETOBJECT_H
|
||||
#define Py_GREENLETOBJECT_H
|
||||
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
/* This is deprecated and undocumented. It does not change. */
|
||||
#define GREENLET_VERSION "1.0.0"
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
#define implementation_ptr_t void*
|
||||
#endif
|
||||
|
||||
typedef struct _greenlet {
|
||||
PyObject_HEAD
|
||||
PyObject* weakreflist;
|
||||
PyObject* dict;
|
||||
implementation_ptr_t pimpl;
|
||||
} PyGreenlet;
|
||||
|
||||
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
|
||||
|
||||
|
||||
/* C API functions */
|
||||
|
||||
/* Total number of symbols that are exported */
|
||||
#define PyGreenlet_API_pointers 12
|
||||
|
||||
#define PyGreenlet_Type_NUM 0
|
||||
#define PyExc_GreenletError_NUM 1
|
||||
#define PyExc_GreenletExit_NUM 2
|
||||
|
||||
#define PyGreenlet_New_NUM 3
|
||||
#define PyGreenlet_GetCurrent_NUM 4
|
||||
#define PyGreenlet_Throw_NUM 5
|
||||
#define PyGreenlet_Switch_NUM 6
|
||||
#define PyGreenlet_SetParent_NUM 7
|
||||
|
||||
#define PyGreenlet_MAIN_NUM 8
|
||||
#define PyGreenlet_STARTED_NUM 9
|
||||
#define PyGreenlet_ACTIVE_NUM 10
|
||||
#define PyGreenlet_GET_PARENT_NUM 11
|
||||
|
||||
#ifndef GREENLET_MODULE
|
||||
/* This section is used by modules that uses the greenlet C API */
|
||||
static void** _PyGreenlet_API = NULL;
|
||||
|
||||
# define PyGreenlet_Type \
|
||||
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
|
||||
|
||||
# define PyExc_GreenletError \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
|
||||
|
||||
# define PyExc_GreenletExit \
|
||||
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_New(PyObject *args)
|
||||
*
|
||||
* greenlet.greenlet(run, parent=None)
|
||||
*/
|
||||
# define PyGreenlet_New \
|
||||
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
|
||||
_PyGreenlet_API[PyGreenlet_New_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetCurrent(void)
|
||||
*
|
||||
* greenlet.getcurrent()
|
||||
*/
|
||||
# define PyGreenlet_GetCurrent \
|
||||
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Throw(
|
||||
* PyGreenlet *greenlet,
|
||||
* PyObject *typ,
|
||||
* PyObject *val,
|
||||
* PyObject *tb)
|
||||
*
|
||||
* g.throw(...)
|
||||
*/
|
||||
# define PyGreenlet_Throw \
|
||||
(*(PyObject * (*)(PyGreenlet * self, \
|
||||
PyObject * typ, \
|
||||
PyObject * val, \
|
||||
PyObject * tb)) \
|
||||
_PyGreenlet_API[PyGreenlet_Throw_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
|
||||
*
|
||||
* g.switch(*args, **kwargs)
|
||||
*/
|
||||
# define PyGreenlet_Switch \
|
||||
(*(PyObject * \
|
||||
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
|
||||
_PyGreenlet_API[PyGreenlet_Switch_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
|
||||
*
|
||||
* g.parent = new_parent
|
||||
*/
|
||||
# define PyGreenlet_SetParent \
|
||||
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
|
||||
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
|
||||
|
||||
/*
|
||||
* PyGreenlet_GetParent(PyObject* greenlet)
|
||||
*
|
||||
* return greenlet.parent;
|
||||
*
|
||||
* This could return NULL even if there is no exception active.
|
||||
* If it does not return NULL, you are responsible for decrementing the
|
||||
* reference count.
|
||||
*/
|
||||
# define PyGreenlet_GetParent \
|
||||
(*(PyGreenlet* (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
|
||||
|
||||
/*
|
||||
* deprecated, undocumented alias.
|
||||
*/
|
||||
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
|
||||
|
||||
# define PyGreenlet_MAIN \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
|
||||
|
||||
# define PyGreenlet_STARTED \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
|
||||
|
||||
# define PyGreenlet_ACTIVE \
|
||||
(*(int (*)(PyGreenlet*)) \
|
||||
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
|
||||
|
||||
|
||||
|
||||
|
||||
/* Macro that imports greenlet and initializes C API */
|
||||
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
|
||||
keep the older definition to be sure older code that might have a copy of
|
||||
the header still works. */
|
||||
# define PyGreenlet_Import() \
|
||||
{ \
|
||||
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
|
||||
}
|
||||
|
||||
#endif /* GREENLET_MODULE */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
#endif /* !Py_GREENLETOBJECT_H */
|
||||
1
backend_flask/flask/lib64
Normal file
1
backend_flask/flask/lib64
Normal file
@ -0,0 +1 @@
|
||||
lib
|
||||
5
backend_flask/flask/pyvenv.cfg
Normal file
5
backend_flask/flask/pyvenv.cfg
Normal file
@ -0,0 +1,5 @@
|
||||
home = /usr/bin
|
||||
include-system-site-packages = false
|
||||
version = 3.12.3
|
||||
executable = /usr/bin/python3.12
|
||||
command = /usr/bin/python3 -m venv /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask
|
||||
123
backend_flask/myapp/__init__.py
Normal file
123
backend_flask/myapp/__init__.py
Normal file
@ -0,0 +1,123 @@
|
||||
# backend/myapp/__init__.py
"""Flask application package: extension wiring, config loading, app factory."""

import sys
import os
from flask import Flask, jsonify

# --- 1. Import Extensions ---
# Import specific extension instances defined in extensions.py
# Avoid 'import *'
try:
    from .extensions import mongo, cors  # Add other extensions like jwt, ma if used
except ImportError as e:
    print(f"Error importing extensions: {e}. Make sure extensions.py exists and defines instances.")
    # Provide dummy instances or raise an error if extensions are critical.
    # Downstream code guards with `if mongo:` / `if cors:` before using them.
    mongo = None
    cors = None

# --- 2. Import Default Config ---
# Assumes config.py is in the parent 'backend' directory. Adjust if moved.
try:
    # This relative import works if 'backend' is treated as a package or is in sys.path
    from .config import Config, config as config_options  # Assuming config.py has a 'config' dict for selection
except ImportError:
    print("Warning: Could not import default config from parent directory.")
    # Define a minimal fallback Config class
    # NOTE(review): the fallback hard-codes a secret key and DEBUG=False — fine
    # for booting in development, but must never reach production.
    class Config:
        SECRET_KEY = os.environ.get('SECRET_KEY') or 'a-default-fallback-secret-key'
        DEBUG = False
    config_options = {'default': Config}
|
||||
|
||||
|
||||
def create_app(config_name='default') -> Flask:
    """
    Create and configure the Flask application instance.

    Uses the Application Factory pattern: selects a config class, initializes
    extensions (PyMongo, CORS), registers the API blueprints, and installs the
    root health route plus a JSON 404 handler.

    Args:
        config_name: Key into ``config_options``; unknown names fall back to
            the default ``Config`` class.

    Returns:
        The fully configured :class:`flask.Flask` application.
    """
    # === Step 1: Create Flask App ===
    # instance_relative_config=True lets instance/config.py override defaults.
    app = Flask(__name__, instance_relative_config=True)

    # === Step 2: Load Configuration ===
    selected_config = config_options.get(config_name, Config)
    app.config.from_object(selected_config)

    # Instance config (instance/config.py) overrides defaults; silent=True
    # makes a missing file a no-op rather than an error.
    app.config.from_pyfile('config.py', silent=True)

    # === Step 3: Initialize Extensions ===
    if mongo:
        try:
            mongo.init_app(app)
            print("PyMongo initialized successfully.")
        except Exception as e:
            print(f"Error initializing PyMongo: {e}")

    if cors:
        try:
            # Allow the configured frontend origin on all /api/* routes.
            # Fix: removed an unused hard-coded localhost origin variable; the
            # effective origin comes from FRONTEND_ORIGIN config ('*' fallback).
            cors.init_app(
                app,
                resources={r"/api/*": {"origins": app.config.get('FRONTEND_ORIGIN', '*')}},
                supports_credentials=True,
            )
            print("CORS initialized successfully.")
        except Exception as e:
            print(f"Error initializing CORS: {e}")

    # === Step 4: Register Blueprints ===
    # Imported lazily so a broken blueprint module degrades to a logged error
    # instead of preventing app creation.
    try:
        # Each blueprint package's __init__.py defines an object named 'bp'.
        from .auth import bp as auth_bp
        from .ai_services import bp as ai_services_bp
        from .activity import bp as activity_bp
        from .dialog import bp as dialog_bp
        from .projects import bp as projects_bp
        from .urls import bp as urls_bp

        app.register_blueprint(auth_bp, url_prefix='/api/auth')
        app.register_blueprint(ai_services_bp, url_prefix="/api/ai")
        app.register_blueprint(activity_bp, url_prefix='/api/activity')
        app.register_blueprint(projects_bp, url_prefix='/api/projects')
        app.register_blueprint(dialog_bp, url_prefix="/api/dialog")
        app.register_blueprint(urls_bp, url_prefix="/api/urls")
        print("Blueprints registered successfully.")

    except (ModuleNotFoundError, ImportError) as e:
        print(f"Error importing or registering blueprints: {e}. Check blueprint structure and 'bp' variable names.")
    except Exception as e:
        print(f"An unexpected error occurred during blueprint registration: {e}")

    # === Step 5: Root Route and Error Handlers ===
    @app.route("/")
    def index():
        # Health check: report DB connectivity via the cheap, auth-free
        # 'ismaster' admin command.
        db_status = "disconnected"
        if mongo:
            try:
                mongo.cx.admin.command('ismaster')
                db_status = "connected"
            except Exception:
                db_status = "connection error"
        return jsonify({"message": "Backend service is running!", "database_status": db_status})

    @app.errorhandler(404)
    def page_not_found(e):
        # Return JSON (not HTML) for unknown routes, matching the API style.
        return jsonify(error=str(e)), 404

    print(f"App created with config: {config_name}")
    print(f"Instance path: {app.instance_path}")

    return app
|
||||
11
backend_flask/myapp/activity/__init__.py
Normal file
11
backend_flask/myapp/activity/__init__.py
Normal file
@ -0,0 +1,11 @@
|
||||
# myapp/activity/__init__.py
"""Blueprint package for project activity logging endpoints."""

from flask import Blueprint

# Define the Blueprint instance for the project activity module.
# NOTE(review): the app factory also passes url_prefix='/api/activity' when
# registering this blueprint; that value takes precedence over the one given
# here, so keep the two in sync (or drop one) — confirm against create_app.
bp = Blueprint('activity', __name__, url_prefix='/api/activity')

# Import the routes module for this blueprint.
# This assumes your routes are defined in 'activity_routes.py'.
# The import MUST come AFTER 'bp' is defined (the routes module imports 'bp'
# back from this package, so importing earlier would be circular).
from . import activity_routes
|
||||
297
backend_flask/myapp/activity/activity_routes.py
Normal file
297
backend_flask/myapp/activity/activity_routes.py
Normal file
@ -0,0 +1,297 @@
|
||||
# myapp/activity/activity_routes.py
"""Routes for creating and listing project activity log entries.

All imports of shared extensions/utilities are wrapped in try/except so the
module still loads (with degraded, error-returning behavior) when a dependency
is missing; each route re-checks these names before using them.
"""

import datetime
import logging
from flask import request, jsonify, current_app, has_app_context  # Flask utilities
from bson.objectid import ObjectId, InvalidId  # For MongoDB ObjectIds
from functools import wraps  # Import wraps for dummy decorator

# --- Local Blueprint Import ---
from . import bp  # Import the 'bp' instance defined in the local __init__.py

# --- Shared Extensions and Utilities Imports ---
try:
    from ..extensions import mongo  # Import the initialized PyMongo instance
    from ..utils import token_required  # Import the authentication decorator
except ImportError:
    # Fallback or error handling if imports fail
    print("Warning: Could not import mongo or token_required in activity/activity_routes.py.")
    mongo = None
    # Define a dummy decorator if token_required is missing: every decorated
    # route then returns a 500 instead of running unauthenticated.
    def token_required(f):
        @wraps(f)  # Use wraps for better introspection
        def wrapper(*args, **kwargs):
            print("ERROR: token_required decorator is not available!")
            return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500
        return wrapper

# --- Schema Imports ---
try:
    # Import the relevant schemas defined in schemas.py
    from ..schemas import ActivityCreateSchema, ActivitySchema
    from marshmallow import ValidationError
except ImportError:
    print("Warning: Could not import Activity schemas or ValidationError in activity/activity_routes.py.")
    ActivityCreateSchema = None
    ActivitySchema = None
    ValidationError = None  # Define ValidationError as None if import fails
|
||||
|
||||
# --- Helper to get logger safely ---
def _get_logger():
    """Return the current app's logger inside an app context, else a module-level logger."""
    return current_app.logger if has_app_context() else logging.getLogger(__name__)
|
||||
|
||||
# Note: Routes use paths relative to the '/api/activity' prefix defined in __init__.py.
|
||||
|
||||
@bp.route('/', methods=['POST'])  # Path relative to blueprint prefix
@token_required
def create_activity(current_user):
    """
    Create a new project activity log entry.

    Uses ActivityCreateSchema for input validation.
    Expects 'projectId', 'activityType', and optional 'message' in JSON payload.
    Verifies user has access to the project (owner or collaborator) before
    inserting into the 'project_activity' collection.

    Returns:
        201 with the new activity id on success; 400/401/403/404/422/500 JSON
        error payloads otherwise.
    """
    logger = _get_logger()
    # Validate user object from token
    # (current_user is supplied by @token_required — presumably a dict with a
    # string '_id'; confirm against the decorator's contract.)
    if not current_user or not current_user.get("_id"):
        logger.error("Invalid current_user object received in create_activity")
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in create_activity: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies (module-level imports may have failed at load time)
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not ActivityCreateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    schema = ActivityCreateSchema()
    try:
        validated_data = schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Create activity validation failed: {err.messages}")
        # Return validation errors from Marshmallow
        return jsonify(err.messages), 422  # 422 Unprocessable Entity is appropriate

    # Extract validated data
    project_id_str = validated_data['projectId']  # Already validated as ObjectId string by schema if validator is used
    activity_type = validated_data['activityType']
    message = validated_data.get('message', "")  # Get optional message

    try:
        # Convert projectId string to ObjectId (schema validator should ensure format)
        try:
            project_obj_id = ObjectId(project_id_str)
        except InvalidId:
            # This should ideally be caught by schema validation if using _validate_object_id
            logger.error(f"Schema validation passed but ObjectId conversion failed for: {project_id_str}")
            return jsonify({"message": "Invalid projectId format despite schema validation."}), 400

        # --- Verify Project Access ---
        # Projection fetches only the fields needed for the access check.
        db = mongo.db
        project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1})
        if not project:
            return jsonify({"message": "Project not found."}), 404  # 404 Not Found

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if owner_id != user_id and user_id not in collaborators:
            # 403 Forbidden - authenticated but not authorized for this project
            return jsonify({"message": "You do not have access to this project."}), 403

        # --- Prepare and Insert Activity Log ---
        now = datetime.datetime.now(datetime.timezone.utc)  # Use timezone-aware UTC time
        doc = {
            "projectId": project_obj_id,
            "userId": user_id,  # Store the user who performed the activity
            "activityType": activity_type,
            "message": message,
            "createdAt": now
            # No updatedAt for activity logs usually
        }
        result = db.project_activity.insert_one(doc)

        # Return success response with the ID of the new log entry
        return jsonify({
            "message": "Activity log created successfully.",
            "activity_id": str(result.inserted_id)  # Convert ObjectId to string
        }), 201  # 201 Created status code

    except KeyError:  # Should be caught by token_required or initial check
        logger.error(f"User ID (_id) not found in token payload for create_activity.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        # mongo.db raises AttributeError when PyMongo was never init_app'd.
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error creating activity for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while creating the activity log."}), 500
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])  # Path relative to blueprint prefix
@token_required
def list_activity_logs(current_user):
    """
    Return the activity log entries for one project.

    Query parameters:
        projectId (required) -- hex ObjectId of the project whose logs to list.
        limit / offset       -- pagination controls (defaults 20 / 0).

    Access is granted only to the project owner or a listed collaborator.
    Output is serialized with ActivitySchema; newest entries come first.
    """
    logger = _get_logger()

    # Guard: the token decorator must have supplied a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500

    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in list_activity_logs: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Guard: module-level dependencies may be None if their imports failed.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not ActivitySchema:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        params = request.args
        requested_project = params.get("projectId", "")

        def _page_arg(raw, fallback, floor):
            # Parse one pagination argument, clamping to `floor` and
            # falling back to `fallback` when the value is not an integer.
            try:
                return max(int(raw), floor)
            except ValueError:
                return fallback

        page_size = _page_arg(params.get("limit", "20"), 20, 1)
        page_start = _page_arg(params.get("offset", "0"), 0, 0)

        if not requested_project:
            return jsonify({"message": "Query parameter 'projectId' is required to list logs."}), 400

        try:
            project_obj_id = ObjectId(requested_project)
        except InvalidId:
            return jsonify({"message": "Invalid projectId format in query parameter."}), 400

        # --- Authorization: owner or collaborator only ---
        db = mongo.db
        project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1})
        if not project:
            return jsonify({"message": "Project not found."}), 404

        is_owner = project.get("ownerId") == user_id
        is_collaborator = user_id in project.get("collaborators", [])
        if not (is_owner or is_collaborator):
            return jsonify({"message": "You do not have access to this project's activity logs."}), 403

        # --- Fetch one page of logs, newest first ---
        page = list(
            db.project_activity.find({"projectId": project_obj_id})
            .sort("createdAt", -1)
            .skip(page_start)
            .limit(page_size)
        )

        # Serialize via the schema (handles ObjectId / datetime conversion).
        serialized = ActivitySchema(many=True).dump(page)
        return jsonify({"activity_logs": serialized}), 200

    except KeyError:  # Should be caught by token_required or initial check
        logger.error("User ID (_id) not found in token payload for list_activity_logs.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error listing activity logs for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while listing activity logs."}), 500
|
||||
|
||||
|
||||
@bp.route('/<string:activity_id>', methods=['DELETE'])  # Path relative to blueprint prefix
@token_required
def delete_activity_log(current_user, activity_id):
    """
    Remove a single activity log entry.

    Only the owner of the project the entry belongs to may delete it.
    Returns 404 when the entry (or its associated project) cannot be
    found and 403 when the caller is not the project owner.
    """
    logger = _get_logger()

    # Guard: the token decorator must have supplied a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in delete_activity_log: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500

    try:
        try:
            obj_activity_id = ObjectId(activity_id)
        except InvalidId:
            return jsonify({"message": "Invalid activity log ID format."}), 400

        db = mongo.db

        # Look up the entry, projecting only the project reference needed
        # for the ownership check.
        log_entry = db.project_activity.find_one({"_id": obj_activity_id}, {"projectId": 1})
        if not log_entry:
            return jsonify({"message": "Activity log not found."}), 404

        project_ref = log_entry.get("projectId")
        if not project_ref or not isinstance(project_ref, ObjectId):
            logger.error(f"Activity log {activity_id} is missing valid projectId.")
            return jsonify({"message": "Cannot verify ownership due to missing project reference."}), 500

        parent_project = db.projects.find_one({"_id": project_ref}, {"ownerId": 1})
        if not parent_project:
            logger.warning(f"Project {project_ref} associated with activity log {activity_id} not found.")
            # Deny deletion of orphaned entries rather than guessing at ownership.
            return jsonify({"message": "Associated project not found."}), 404

        # Only the project owner may delete its activity entries.
        if parent_project.get("ownerId") != user_id:
            return jsonify({"message": "You do not have permission to delete this activity log (must be project owner)."}), 403

        outcome = db.project_activity.delete_one({"_id": obj_activity_id})
        if outcome.deleted_count == 1:
            return jsonify({"message": "Activity log deleted successfully."}), 200

        # Found above but nothing was removed -- likely a concurrent delete.
        logger.warning(f"Activity log {activity_id} found but delete_one removed 0 documents.")
        return jsonify({"message": "Failed to delete activity log (already deleted?)."}), 404

    except KeyError:  # Should be caught by token_required or initial check
        logger.error("User ID (_id) not found in token payload for delete_activity_log.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error deleting activity log {activity_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while deleting the activity log."}), 500
|
||||
|
||||
13
backend_flask/myapp/ai_services/__init__.py
Normal file
13
backend_flask/myapp/ai_services/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# myapp/ai_services/__init__.py
# NOTE: the original header comment named the wrong file (api_keys/__init__.py);
# this module lives in myapp/ai_services/ per the repository layout.

from flask import Blueprint

# Blueprint for the AI services / API key management module.
# 'api_keys' is the unique blueprint name; url_prefix='/api/keys' is
# prepended to every route registered on it.
bp = Blueprint('api_keys', __name__, url_prefix='/api/keys')

# Import the routes module so its view functions register on 'bp'.
# This import MUST come AFTER the Blueprint 'bp' is defined.
from . import ai_routes
|
||||
337
backend_flask/myapp/ai_services/ai_routes.py
Normal file
337
backend_flask/myapp/ai_services/ai_routes.py
Normal file
@ -0,0 +1,337 @@
|
||||
# myapp/ai_services/ai_routes.py
|
||||
# This file handles API Key management logic.
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from flask import request, jsonify, current_app, has_app_context # Flask utilities
|
||||
from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds
|
||||
from functools import wraps # Import wraps for dummy decorator
|
||||
|
||||
# --- Local Blueprint Import ---
|
||||
from . import bp # Import the 'bp' instance defined in the local __init__.py
|
||||
|
||||
# --- Shared Extensions and Utilities Imports ---
|
||||
try:
|
||||
from ..extensions import mongo # Import the initialized PyMongo instance
|
||||
from ..utils import token_required # Import the authentication decorator
|
||||
except ImportError:
|
||||
# Fallback or error handling if imports fail
|
||||
print("Warning: Could not import mongo or token_required in ai_services/ai_routes.py.")
|
||||
mongo = None
|
||||
# Define a dummy decorator if token_required is missing
|
||||
def token_required(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
print("ERROR: token_required decorator is not available!")
|
||||
return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500
|
||||
return wrapper
|
||||
|
||||
# --- Schema Imports ---
|
||||
try:
|
||||
# Import the relevant schemas defined in schemas.py
|
||||
from ..schemas import APIKeyCreateSchema, APIKeyUpdateSchema, APIKeySchema
|
||||
from marshmallow import ValidationError
|
||||
except ImportError:
|
||||
print("Warning: Could not import APIKey schemas or ValidationError in ai_services/ai_routes.py.")
|
||||
APIKeyCreateSchema = None
|
||||
APIKeyUpdateSchema = None
|
||||
APIKeySchema = None
|
||||
ValidationError = None # Define ValidationError as None if import fails
|
||||
|
||||
# --- Helper to get logger safely ---
|
||||
def _get_logger():
    """Return the app logger inside an app context, else a module-level logger."""
    if not has_app_context():
        return logging.getLogger(__name__)
    return current_app.logger
|
||||
|
||||
# Note: Routes use paths relative to the '/api/ai' prefix.
|
||||
# Original '/api_list' becomes '/keys'
|
||||
# Original '/api_list/<api_id>' becomes '/keys/<api_id>'
|
||||
|
||||
@bp.route('/keys', methods=['GET'])  # Path relative to blueprint prefix
@token_required
def list_api_keys(current_user):
    """
    List every API key owned by the authenticated user.

    Keys are read from the 'api_list' collection, ordered by most recent
    update, and serialized with APIKeySchema.
    """
    logger = _get_logger()

    # Guard: the token decorator must have supplied a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in list_api_keys: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not APIKeySchema:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        # Most recently updated keys first.
        documents = list(mongo.db.api_list.find({"uid": user_id}).sort("updatedAt", -1))

        # NOTE(review): APIKeySchema currently dumps the full key value;
        # consider masking it in the schema if exposure is a concern.
        payload = APIKeySchema(many=True).dump(documents)
        return jsonify({"api_keys": payload}), 200

    except KeyError:  # Should be caught by initial user_id check
        logger.error("User ID (_id) not found in token payload for list_api_keys.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error listing API keys for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while listing API keys."}), 500
|
||||
|
||||
|
||||
@bp.route('/keys', methods=['POST'])  # Path relative to blueprint prefix
@token_required
def create_api_key(current_user):
    """
    Store a new API key for the authenticated user.

    The JSON body is validated with APIKeyCreateSchema ('name', 'key',
    optional 'selected'). Key names must be unique per user; a duplicate
    name yields 409. Returns the new document id on success (201).
    """
    logger = _get_logger()

    # Guard: the token decorator must have supplied a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in create_api_key: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not APIKeyCreateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Validate the request payload before touching the database.
    try:
        payload = APIKeyCreateSchema().load(request.get_json() or {})
    except ValidationError as err:
        logger.warning(f"Create API key validation failed: {err.messages}")
        return jsonify(err.messages), 422

    name = payload['name']
    api_key = payload['key']
    selected = payload['selected']  # schema supplies the default when absent

    try:
        db = mongo.db
        # Enforce per-user uniqueness of key names.
        if db.api_list.find_one({"uid": user_id, "name": name}):
            return jsonify({"message": f"User already has an API key for {name}."}), 409

        timestamp = datetime.datetime.now(datetime.timezone.utc)  # timezone-aware UTC
        inserted = db.api_list.insert_one({
            "uid": user_id,          # owner's ObjectId
            "name": name,
            "key": api_key,
            "selected": selected,    # validated boolean
            "createdAt": timestamp,
            "updatedAt": timestamp,
        })

        return jsonify({
            "message": "API key created successfully.",
            "api_id": str(inserted.inserted_id),  # ObjectId -> string
        }), 201

    except KeyError:  # Should be caught by initial user_id check
        logger.error("User ID (_id) not found in token payload for create_api_key.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error creating API key for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while creating API key."}), 500
|
||||
|
||||
|
||||
@bp.route('/keys/<string:api_id>', methods=['PUT'])  # Path relative to blueprint prefix
@token_required
def update_api_key(current_user, api_id):
    """
    Modify an existing API key ('name', 'key', and/or 'selected').

    Input is validated with APIKeyUpdateSchema. The caller must own the
    key; renaming to a name the user already uses yields 409. The
    'updatedAt' timestamp is refreshed on every successful update.
    """
    logger = _get_logger()

    # Guard: the token decorator must have supplied a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in update_api_key: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not APIKeyUpdateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # The update schema has no required fields, so an empty body loads
    # cleanly; emptiness is rejected explicitly below.
    try:
        changes = APIKeyUpdateSchema().load(request.get_json() or {})
    except ValidationError as err:
        logger.warning(f"Update API key validation failed: {err.messages}")
        return jsonify(err.messages), 422

    if not changes:
        return jsonify({"message": "No valid fields provided for update."}), 400

    try:
        try:
            object_id = ObjectId(api_id)
        except InvalidId:
            return jsonify({"message": "Invalid API key ID format."}), 400

        db = mongo.db
        existing = db.api_list.find_one({"_id": object_id})
        if not existing:
            return jsonify({"message": "API key not found."}), 404

        # Ownership check: only the key's owner may modify it.
        owner = existing.get("uid")
        if not owner or owner != user_id:
            return jsonify({"message": "You do not have permission to update this API key."}), 403

        # --- Build the $set document from the validated changes ---
        update_fields = {}
        if "name" in changes:
            new_name = changes["name"]
            # Reject renames that collide with another of the user's keys.
            if new_name != existing.get("name") and db.api_list.find_one(
                {"uid": user_id, "name": new_name, "_id": {"$ne": object_id}}
            ):
                return jsonify({"message": f"User already has another API key named {new_name}."}), 409
            update_fields["name"] = new_name

        if "key" in changes:
            update_fields["key"] = changes["key"]
        if "selected" in changes:
            update_fields["selected"] = changes["selected"]  # already boolean via schema

        if not update_fields:
            return jsonify({"message": "No valid changes detected in the provided data."}), 400

        # Always refresh the modification timestamp.
        update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc)

        outcome = db.api_list.update_one({"_id": object_id}, {"$set": update_fields})
        if outcome.matched_count == 1:
            return jsonify({"message": "API key updated successfully."}), 200

        # Defensive: the document vanished between find_one and update_one.
        logger.warning(f"Update matched count was {outcome.matched_count} for api_id {api_id}")
        return jsonify({"message": "API key update failed (key not found after initial check)."}), 404

    except KeyError:  # Should be caught by initial user_id check
        logger.error("User ID (_id) not found in token payload for update_api_key.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error updating API key {api_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while updating the API key."}), 500
|
||||
|
||||
|
||||
@bp.route('/keys/<string:api_id>', methods=['DELETE'])  # Path relative to blueprint prefix
@token_required
def delete_api_key(current_user, api_id):
    """
    Delete one of the authenticated user's API keys.

    The caller must own the key (403 otherwise); a missing key yields 404.
    """
    logger = _get_logger()

    # Guard: the token decorator must have supplied a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in delete_api_key: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500

    try:
        try:
            object_id = ObjectId(api_id)
        except InvalidId:
            return jsonify({"message": "Invalid API key ID format."}), 400

        db = mongo.db
        # Only the owner id is needed for the permission check.
        target = db.api_list.find_one({"_id": object_id}, {"uid": 1})
        if not target:
            return jsonify({"message": "API key not found."}), 404

        owner = target.get("uid")
        if not owner or owner != user_id:
            return jsonify({"message": "You do not have permission to delete this API key."}), 403

        outcome = db.api_list.delete_one({"_id": object_id})
        if outcome.deleted_count == 1:
            return jsonify({"message": "API key deleted successfully."}), 200

        # The document existed moments ago but nothing was removed.
        logger.error(f"Failed to delete API key {api_id} despite finding it initially.")
        return jsonify({"message": "Failed to delete API key (already deleted?)."}), 404

    except KeyError:  # Should be caught by initial user_id check
        logger.error("User ID (_id) not found in token payload for delete_api_key.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error deleting API key {api_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while deleting the API key."}), 500
|
||||
13
backend_flask/myapp/auth/__init__.py
Normal file
13
backend_flask/myapp/auth/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# myapp/auth/__init__.py

from flask import Blueprint

# Blueprint for the authentication module. 'auth' is its unique name and
# url_prefix='/api/auth' is prepended to every route it registers.
bp = Blueprint('auth', __name__, url_prefix='/api/auth')

# Importing the routes module attaches its view functions to 'bp'.
# Keep this import AFTER the Blueprint definition to avoid circular imports.
from . import auth_routes
|
||||
444
backend_flask/myapp/auth/auth_routes.py
Normal file
444
backend_flask/myapp/auth/auth_routes.py
Normal file
@ -0,0 +1,444 @@
|
||||
# myapp/auth/auth_routes.py
|
||||
|
||||
import datetime
|
||||
import jwt # For encoding JWT tokens
|
||||
import logging
|
||||
from flask import request, jsonify, current_app, has_app_context # Flask utilities
|
||||
from werkzeug.security import generate_password_hash, check_password_hash # For hashing and checking passwords
|
||||
from bson.objectid import ObjectId, InvalidId # For converting string IDs to MongoDB ObjectId
|
||||
from functools import wraps # Import wraps for dummy decorator
|
||||
|
||||
# --- Local Blueprint Import (Moved to Top) ---
|
||||
# Import the 'bp' instance defined in the local __init__.py FIRST
|
||||
# This often helps resolve circular import issues involving blueprints and utilities/models.
|
||||
from . import bp
|
||||
|
||||
|
||||
# --- Shared Utilities Import ---
|
||||
# Import the token_required decorator from the utils module
|
||||
try:
|
||||
# Assumes utils.py is in the parent 'myapp' package
|
||||
from ..utils import token_required
|
||||
except ImportError as e:
|
||||
# Fallback or error handling if the decorator isn't found
|
||||
print("Warning: token_required decorator not found in auth/auth_routes.py. Protected routes will fail.")
|
||||
print(e)
|
||||
# Define a dummy decorator to prevent NameError, but it won't protect routes
|
||||
def token_required(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
print("ERROR: token_required decorator is not available!")
|
||||
return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500
|
||||
return wrapper
|
||||
|
||||
|
||||
# --- Schema Imports ---
|
||||
try:
|
||||
# Import the relevant schemas defined in schemas.py
|
||||
from ..schemas import UserRegistrationSchema, UserLoginSchema, UserSchema, UserUpdateSchema
|
||||
from marshmallow import ValidationError
|
||||
except ImportError:
|
||||
print("Warning: Could not import User schemas or ValidationError in auth/auth_routes.py.")
|
||||
UserRegistrationSchema = None
|
||||
UserLoginSchema = None
|
||||
UserSchema = None
|
||||
UserUpdateSchema = None
|
||||
ValidationError = None
|
||||
|
||||
# --- Shared Extensions Import ---
|
||||
# Import mongo for direct use (alternative to current_app.mongo)
|
||||
try:
|
||||
from ..extensions import mongo
|
||||
except ImportError:
|
||||
print("Warning: Could not import mongo extension in auth/auth_routes.py.")
|
||||
mongo = None
|
||||
|
||||
|
||||
# --- Helper to get logger safely ---
|
||||
def _get_logger():
    """App logger when a Flask app context is active; module logger otherwise."""
    return current_app.logger if has_app_context() else logging.getLogger(__name__)
|
||||
|
||||
# Note: Routes use paths relative to the '/api/auth' prefix defined in __init__.py.
|
||||
|
||||
@bp.route('/register', methods=['POST'])
def register():
    """
    Create a new user account.

    The JSON body is validated with UserRegistrationSchema ('username',
    'email', 'password'). Duplicate usernames/emails yield 409. On success
    the password is hashed, the user is stored, and the response carries a
    JWT plus the new user serialized with UserSchema (201).
    """
    logger = _get_logger()

    # Guard: module-level dependencies may be None if their imports failed.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not UserRegistrationSchema or not UserSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Validate the request payload.
    try:
        validated = UserRegistrationSchema().load(request.get_json() or {})
    except ValidationError as err:
        logger.warning(f"Registration validation failed: {err.messages}")
        return jsonify(err.messages), 422

    username = validated['username']
    email = validated['email']
    password = validated['password']  # raw password (load_only field)

    # Reject duplicate usernames / emails up front.
    try:
        db = mongo.db
        if db.users.find_one({"username": username}):
            return jsonify({"message": "Username already exists."}), 409
        if db.users.find_one({"email": email}):
            return jsonify({"message": "Email already registered."}), 409
    except AttributeError:
        logger.error("PyMongo extension not initialized or db attribute missing.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Database error checking existing user: {e}", exc_info=True)
        return jsonify({"message": "Database error during registration check."}), 500

    # Never store the raw password.
    hashed_pw = generate_password_hash(password)

    registered_at = datetime.datetime.now(datetime.timezone.utc)  # timezone-aware UTC
    new_user_doc = {
        "username": username,
        "email": email,
        "password": hashed_pw,
        "createdAt": registered_at,
        "updatedAt": registered_at,
    }

    try:
        user_id = db.users.insert_one(new_user_doc).inserted_id  # ObjectId
        # Re-read the stored document so the response reflects the DB state.
        created_user = db.users.find_one({"_id": user_id})
        if not created_user:  # extremely unlikely; keep registration alive
            logger.error(f"Failed to retrieve user immediately after insertion: {user_id}")
            created_user = {"_id": user_id, "username": username, "email": email}
    except Exception as e:
        logger.error(f"Error inserting new user: {e}", exc_info=True)
        return jsonify({"message": "An error occurred during registration."}), 500

    # Issue a JWT using settings from the app config.
    try:
        secret_key = current_app.config['SECRET_KEY']
        algo = current_app.config.get('JWT_ALGORITHM', 'HS256')
        exp_hours = current_app.config.get('JWT_EXP_DELTA_HOURS', 24)
        token = jwt.encode(
            {
                "user_id": str(user_id),  # ObjectId -> string for the payload
                "exp": datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=exp_hours),
            },
            secret_key,
            algorithm=algo,
        )
    except KeyError:
        logger.error("SECRET_KEY not configured in Flask app for JWT.")
        return jsonify({"message": "Server configuration error: JWT secret missing."}), 500
    except Exception as e:
        logger.error(f"Error encoding JWT during registration: {e}", exc_info=True)
        return jsonify({"message": "Could not generate authentication token."}), 500

    # UserSchema excludes the password hash from the response.
    serialized_user = UserSchema().dump(created_user)
    return jsonify({
        "message": "User registered successfully.",
        "token": token,
        "user": serialized_user,
    }), 201
|
||||
|
||||
|
||||
@bp.route('/login', methods=['POST'])
def login():
    """
    Log in an existing user.

    Validates the JSON payload with UserLoginSchema ('username' and 'password'),
    verifies the credentials against the users collection, and on success returns
    a JWT token plus the user serialized through UserSchema (password excluded).
    """
    log = _get_logger()

    # Fail fast when required server-side pieces are missing.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not UserLoginSchema or not UserSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Validate the incoming payload against the login schema.
    payload = request.get_json() or {}
    try:
        credentials = UserLoginSchema().load(payload)
    except ValidationError as err:
        log.warning(f"Login validation failed: {err.messages}")
        return jsonify(err.messages), 422

    username = credentials['username']
    password = credentials['password']  # Raw password (schema marks it load_only)

    # Look the user up by username.
    try:
        db = mongo.db
        if db is None:
            raise AttributeError("db attribute is None")
        user_doc = db.users.find_one({"username": username})
    except AttributeError:
        log.error("PyMongo extension not initialized or attached correctly during login.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        log.error(f"Database error during login for user {username}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred during login."}), 500

    # Reject unknown users and bad passwords alike with a single 401 so the
    # response does not reveal which part of the credentials was wrong.
    credentials_ok = (
        user_doc
        and 'password' in user_doc
        and check_password_hash(user_doc["password"], password)
    )
    if not credentials_ok:
        return jsonify({"message": "Invalid credentials."}), 401

    # Build the JWT from app configuration.
    try:
        user_id = user_doc["_id"]
        secret_key = current_app.config['SECRET_KEY']
        algo = current_app.config.get('JWT_ALGORITHM', 'HS256')
        exp_hours = current_app.config.get('JWT_EXP_DELTA_HOURS', 24)
        claims = {
            "user_id": str(user_id),  # ObjectId is not JSON-serializable; store as string
            "exp": datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=exp_hours),
        }
        token = jwt.encode(claims, secret_key, algorithm=algo)
    except KeyError:
        log.error("SECRET_KEY not configured in Flask app for JWT.")
        return jsonify({"message": "Server configuration error: JWT secret missing."}), 500
    except Exception as e:
        log.error(f"Error encoding JWT for user {username}: {e}", exc_info=True)
        return jsonify({"message": "Could not generate authentication token."}), 500

    # Serialize via UserSchema so the password hash never leaves the server.
    serialized_user = UserSchema().dump(user_doc)

    return jsonify({
        "message": "Login successful.",
        "token": token,
        "user": serialized_user,
    }), 200
|
||||
|
||||
|
||||
@bp.route('/delete_account', methods=['DELETE'])
@token_required  # Apply the decorator to protect the route and inject 'current_user'
def delete_account(current_user):
    """
    Delete the account of the currently authenticated user (identified by token).

    Cascades over associated data:
      * removes the user from collaborator lists of projects they do not own,
      * reassigns ownership of their projects to the first remaining collaborator,
      * deletes owned projects with no other collaborators, together with their
        URLs, activity logs and dialog sessions.

    Requires a valid JWT token. Returns 200 on success, 404 if the user no
    longer exists, 400/401 on token problems, 500 on server errors.
    """
    logger = _get_logger()

    # Validate user object from token. BUGFIX: accept either '_id' (user document)
    # or 'user_id' (raw JWT claim) — the extraction below already falls back to
    # 'user_id', but the old guard rejected tokens that only carried that key.
    if not current_user or not (current_user.get("_id") or current_user.get("user_id")):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id_str = str(current_user.get("user_id") or current_user.get("_id"))
        if not user_id_str:
            return jsonify({"message": "Invalid token or user information not found in token."}), 401
        user_id = ObjectId(user_id_str)  # Convert string ID back to ObjectId
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in delete_account from token data {current_user}: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo: return jsonify({"message": "Database connection not available."}), 500

    try:
        db = mongo.db

        # 1. Delete the user document itself
        user_result = db.users.delete_one({"_id": user_id})

        # 2. Remove user from collaborator lists in projects they didn't own
        db.projects.update_many(
            {"ownerId": {"$ne": user_id}, "collaborators": user_id},
            {"$pull": {"collaborators": user_id}}
        )

        # 3. Handle projects owned by the user: reassign when collaborators
        # remain, otherwise queue the project for deletion.
        owned_projects_cursor = db.projects.find({"ownerId": user_id}, {"_id": 1, "collaborators": 1})
        project_ids_to_delete = []
        projects_to_reassign = []

        for project in owned_projects_cursor:
            project_id = project["_id"]
            collaborators = [collab_id for collab_id in project.get("collaborators", []) if collab_id != user_id]
            if collaborators:
                # Promote the first remaining collaborator to owner.
                new_owner = collaborators[0]
                projects_to_reassign.append({
                    "filter": {"_id": project_id},
                    "update": {
                        "$set": {"ownerId": new_owner, "lastActivityBy": new_owner},
                        # BUGFIX: pull the deleted user as well as the new owner.
                        # Step 2 above only cleans projects the user did NOT own,
                        # so without this the deleted user's id could linger in
                        # the collaborators array of reassigned projects.
                        "$pull": {"collaborators": {"$in": [user_id, new_owner]}}
                    }
                })
            else:
                project_ids_to_delete.append(project_id)

        if projects_to_reassign:
            for reassignment in projects_to_reassign:
                db.projects.update_one(reassignment["filter"], reassignment["update"])
            logger.info(f"Reassigned ownership for {len(projects_to_reassign)} projects previously owned by {user_id_str}")

        if project_ids_to_delete:
            delete_owned_projects_result = db.projects.delete_many({"_id": {"$in": project_ids_to_delete}})
            logger.info(f"Deleted {delete_owned_projects_result.deleted_count} projects owned by {user_id_str} with no remaining collaborators.")
            # Cascade deletes for data attached to the removed projects
            delete_urls_result = db.urls.delete_many({"projectId": {"$in": project_ids_to_delete}})
            logger.info(f"Deleted {delete_urls_result.deleted_count} URLs for deleted projects of user {user_id_str}")
            delete_activity_result = db.project_activity.delete_many({"projectId": {"$in": project_ids_to_delete}})
            logger.info(f"Deleted {delete_activity_result.deleted_count} activity logs for deleted projects of user {user_id_str}")
            delete_dialog_result = db.dialog_activity.delete_many({"projectId": {"$in": project_ids_to_delete}})
            logger.info(f"Deleted {delete_dialog_result.deleted_count} dialog sessions for deleted projects of user {user_id_str}")

        if user_result.deleted_count == 1:
            return jsonify({"message": "Account and associated data handled successfully."}), 200
        elif user_result.deleted_count == 0:
            return jsonify({"message": "User not found or already deleted."}), 404
        else:
            logger.warning(f"Unexpected deleted_count ({user_result.deleted_count}) for user {user_id}")
            return jsonify({"message": "An issue occurred during account deletion."}), 500

    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error during account deletion for user {user_id_str}: {e}", exc_info=True)
        return jsonify({"message": "An internal error occurred during account deletion."}), 500
|
||||
|
||||
|
||||
@bp.route('/logout', methods=['POST'])
@token_required  # Stateless JWT: the decorator only ensures the caller is authenticated
def logout(current_user):
    """
    Log out a user.

    JWTs are stateless, so there is no server-side session to invalidate;
    the client simply discards its token after this call succeeds.
    """
    return jsonify({"message": "Logout successful. Please discard your token."}), 200
|
||||
|
||||
|
||||
@bp.route('/account', methods=['PUT'])
@token_required  # Protect the route and get user info from token
def update_account(current_user):
    """
    Update the authenticated user's username, email, and/or password.

    Uses UserUpdateSchema for input validation.
    Expects JSON payload with optional 'username', 'email', 'password' fields.
    Returns 200 on success, 422 on schema validation failure, 409 when a
    requested username/email is already taken by another user, 400 when no
    valid fields were supplied, and 500 on server/configuration errors.
    (Returns simple message, no schema needed for output)
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id_str = str(current_user.get("_id") or current_user.get("user_id"))
        if not user_id_str:
            return jsonify({"message": "User ID not found in token."}), 401
        user_id = ObjectId(user_id_str)
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error from token ({current_user}) in update_account: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not UserUpdateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    schema = UserUpdateSchema()
    try:
        # Load validates optional fields based on schema rules
        validated_data = schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Update account validation failed: {err.messages}")
        return jsonify(err.messages), 422

    # If validation passed but no valid fields were provided
    if not validated_data:
        return jsonify({"message": "No valid update fields provided (username, email, or password)."}), 400

    db = mongo.db
    update_fields = {}  # Dictionary to hold fields to be updated
    db_validation_errors = {}  # Store potential db-level validation errors (like uniqueness)

    # --- Validate uniqueness and prepare updates based on validated_data ---
    try:
        # Check username uniqueness if provided and validated.
        # The $ne filter excludes the current user's own document, so keeping
        # the same username is not flagged as a conflict.
        if "username" in validated_data:
            new_username = validated_data["username"]
            if db.users.find_one({"username": new_username, "_id": {"$ne": user_id}}):
                db_validation_errors["username"] = "Username is already taken."
            else:
                update_fields["username"] = new_username

        # Check email uniqueness if provided and validated (same $ne exclusion)
        if "email" in validated_data:
            new_email = validated_data["email"]
            if db.users.find_one({"email": new_email, "_id": {"$ne": user_id}}):
                db_validation_errors["email"] = "Email is already registered by another user."
            else:
                update_fields["email"] = new_email

        # Hash password if provided and validated; raw password is never stored
        if "password" in validated_data:
            update_fields["password"] = generate_password_hash(validated_data["password"])

    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly during validation.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error during database validation for user {user_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred during data validation."}), 500

    # If database validation errors occurred (e.g., uniqueness checks)
    if db_validation_errors:
        return jsonify({"message": "Validation errors occurred.", "errors": db_validation_errors}), 409  # 409 Conflict

    # If there are fields to update, add the timestamp and perform the update
    if update_fields:
        update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc)
        try:
            result = db.users.update_one({"_id": user_id}, {"$set": update_fields})
            if result.matched_count == 0:
                # This case means the user_id from the token doesn't exist in the DB anymore
                return jsonify({"message": "User not found."}), 404
            # modified_count might be 0 if the provided data was the same as existing data
            # We consider it a success even if no fields were technically modified
            return jsonify({"message": "Account updated successfully."}), 200

        except AttributeError:
            logger.error("PyMongo extension not initialized or attached correctly during update.")
            return jsonify({"message": "Database configuration error."}), 500
        except Exception as e:
            logger.error(f"Error updating account for user {user_id}: {e}", exc_info=True)
            return jsonify({"message": "An error occurred while updating the account."}), 500
    else:
        # This case should ideally not be reached due to the checks at the beginning,
        # but included for completeness if validation passed with no update fields.
        return jsonify({"message": "No changes were requested or fields were invalid."}), 400
|
||||
|
||||
31
backend_flask/myapp/config.py
Normal file
31
backend_flask/myapp/config.py
Normal file
@ -0,0 +1,31 @@
|
||||
import os
|
||||
import secrets
|
||||
|
||||
|
||||
|
||||
class Config:
    """Central application configuration, read from environment variables with fallbacks."""

    # MongoDB Atlas connection string: set it in your environment variables
    # SECURITY NOTE(review): the fallback below embeds live-looking credentials
    # in source control — rotate them and remove the default before release.
    MONGO_URI: str = os.environ.get(
        "MONGO_URI",
        "mongodb+srv://surfsmart_server:IVV0mzUcwoEqHjNV@projectdatacluster.ki0t3z8.mongodb.net/surfsmart?retryWrites=true&w=majority&appName=ProjectDataCluster"
    )

    # Flask secret key for sessions and JWT (use a secure value in production)
    # NOTE(review): this fallback generates a fresh key on every process start,
    # which invalidates all outstanding JWTs after a restart and breaks
    # multi-worker deployments — set SECRET_KEY explicitly in production.
    SECRET_KEY: str = os.environ.get("SECRET_KEY", secrets.token_hex(32))

    # JWT configuration
    JWT_ALGORITHM: str = "HS256"
    JWT_EXP_DELTA_HOURS: int = 2  # token lifetime in hours

    # TODO make this name selectable
    GEMINI_MODEL_NAME = 'gemini-1.5-pro-latest'

    # For celery (broker and result backend default to a local Redis)
    CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0")
    CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379/0")
|
||||
|
||||
|
||||
|
||||
# Named configuration registry; currently only a single default profile.
config = {
    "default": Config()
}
|
||||
13
backend_flask/myapp/dialog/__init__.py
Normal file
13
backend_flask/myapp/dialog/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# myapp/dialog/__init__.py

from flask import Blueprint

# Define the Blueprint instance for the dialog module.
# 'dialog' is the unique name for this blueprint.
# url_prefix='/api/dialog' will be prepended to all routes defined in this blueprint.
bp = Blueprint('dialog', __name__, url_prefix='/api/dialog')

# Import the routes module.
# This connects the routes defined in routes.py (including helper functions) to the 'bp' instance.
# This import MUST come AFTER the Blueprint 'bp' is defined — the standard Flask
# blueprint pattern: dialog_routes imports 'bp' back from this package.
from . import dialog_routes
|
||||
787
backend_flask/myapp/dialog/dialog_routes.py
Normal file
787
backend_flask/myapp/dialog/dialog_routes.py
Normal file
@ -0,0 +1,787 @@
|
||||
# myapp/dialog/dialog_routes.py
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import logging
|
||||
from flask import request, jsonify, current_app, has_app_context # Flask utilities
|
||||
from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds
|
||||
from functools import wraps # Import wraps for dummy decorator
|
||||
|
||||
|
||||
# --- Local Blueprint Import ---
|
||||
from . import bp # Import the 'bp' instance defined in the local __init__.py
|
||||
|
||||
# --- Shared Extensions and Utilities Imports ---
|
||||
try:
|
||||
from ..extensions import mongo # Import the initialized PyMongo instance
|
||||
from ..utils import token_required # Import the authentication decorator
|
||||
except ImportError:
|
||||
# Fallback or error handling if imports fail
|
||||
print("Warning: Could not import mongo or token_required in dialog/dialog_routes.py.")
|
||||
mongo = None
|
||||
# Define a dummy decorator if token_required is missing
|
||||
def token_required(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
print("ERROR: token_required decorator is not available!")
|
||||
return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500
|
||||
return wrapper
|
||||
|
||||
# --- Schema Imports ---
|
||||
try:
|
||||
# Import the relevant schemas defined in schemas.py
|
||||
from ..schemas import (
|
||||
DialogCreateSchema, DialogSendMessageSchema,
|
||||
DialogSchema, DialogSummarySchema
|
||||
)
|
||||
from marshmallow import ValidationError
|
||||
except ImportError:
|
||||
print("Warning: Could not import Dialog schemas or ValidationError in dialog/dialog_routes.py.")
|
||||
DialogCreateSchema = None
|
||||
DialogSendMessageSchema = None
|
||||
DialogSchema = None
|
||||
DialogSummarySchema = None
|
||||
ValidationError = None # Define ValidationError as None if import fails
|
||||
|
||||
|
||||
# --- External API and Langchain Imports ---
|
||||
# Keep these imports conditional to avoid errors if libraries are not installed
|
||||
try:
|
||||
import google.generativeai as genai
|
||||
from google.api_core import exceptions as google_exceptions
|
||||
except ImportError:
|
||||
print("Warning: google.generativeai not installed. Gemini functionality will fail.")
|
||||
genai = None
|
||||
google_exceptions = None
|
||||
|
||||
try:
|
||||
from langchain_community.embeddings import OpenAIEmbeddings
|
||||
from langchain_community.vectorstores import FAISS
|
||||
from langchain.docstore.document import Document
|
||||
except ImportError:
|
||||
print("Warning: Langchain components not installed. Vector store functionality will fail.")
|
||||
OpenAIEmbeddings = None
|
||||
FAISS = None
|
||||
Document = None
|
||||
|
||||
# --- Constants ---
|
||||
# Adjust model name if needed, potentially move to config
|
||||
MAX_HISTORY_MESSAGES = 20 # Max messages to keep in history for context (if applicable, not used in current Gemini call)
|
||||
|
||||
# --- Helper to get logger safely ---
|
||||
def _get_logger():
    """Return the active Flask app logger, or a module-level logger outside app context."""
    return current_app.logger if has_app_context() else logging.getLogger(__name__)
|
||||
|
||||
|
||||
##################################################
|
||||
# Helper Functions (kept within this module)
|
||||
##################################################
|
||||
|
||||
# --- Knowledge Base Helpers ---
|
||||
# (generate_knowledge_base_message, update_project_knowledge, process_api_response_and_update_knowledge - unchanged from previous version)
|
||||
def generate_knowledge_base_message(project_id):
    """
    Retrieves all URL documents for the given project and formats them
    into a single knowledge base message string for context.

    Args:
        project_id (ObjectId | str): The ObjectId of the project (a string id
            is converted; an invalid string yields an error message).

    Returns:
        str: A formatted string containing knowledge base entries, or an empty string on error.
             Returns "No external knowledge available for this project." if no URLs are found.
    """
    logger = _get_logger()
    if not mongo:
        logger.error("Mongo extension not available in generate_knowledge_base_message.")
        return ""  # Cannot proceed without DB connection

    try:
        # Ensure project_id is ObjectId
        if not isinstance(project_id, ObjectId):
            project_id = ObjectId(project_id)

        # Find all URL documents linked to the project ID
        urls_cursor = mongo.db.urls.find({"projectId": project_id})
        knowledge_entries = []
        for doc in urls_cursor:
            # Format keywords with percentages, e.g. "python(12%)"
            keywords_list = [f"{kw.get('word', '')}({kw.get('percentage', 'N/A')}%)" for kw in doc.get("keywords", [])]
            # Aggregate relevant fields into a string entry
            aggregated = (
                f"Title: {doc.get('title', 'N/A')}\n"
                f"URL: {doc.get('url', 'N/A')}\n"
                # f"Starred: {doc.get('starred', False)}\n" # Optionally include starred status
                f"Note: {doc.get('note', 'N/A')}\n"
                f"Keywords: {', '.join(keywords_list) if keywords_list else 'N/A'}\n"
                f"Summary: {doc.get('summary', 'N/A')}"
            )
            knowledge_entries.append(aggregated)

        # Handle case where no URLs are found
        if not knowledge_entries:
            return "No external knowledge available for this project."

        # Combine entries and truncate to keep the LLM context bounded
        combined = "\n\n---\n\n".join(knowledge_entries)
        # Use Flask config for max length if available, otherwise default
        max_length = current_app.config.get('KNOWLEDGE_BASE_MAX_LENGTH', 4000) if has_app_context() else 4000
        if len(combined) > max_length:
            combined = combined[:max_length] + " ... [truncated]"
        return combined

    except InvalidId:
        logger.error(f"Invalid project_id format passed to generate_knowledge_base_message: {project_id}")
        return "Error: Invalid project identifier."
    except AttributeError:
        logger.error("PyMongo extension not initialized or available.")
        return "Error: Database configuration issue."
    except Exception as e:
        # Log the error with project ID for easier debugging
        logger.error(f"Error generating knowledge base message for project {project_id}: {e}", exc_info=True)
        return ""  # Return empty string on generic error
|
||||
|
||||
|
||||
def update_project_knowledge(project_id):
    """
    Refresh the cached knowledge-base snapshot on a project document.

    Regenerates the project's knowledge text and stores its first 1000
    characters in the project's 'summary' field, stamping 'updatedAt'.
    Acts as a cache / quick-reference snapshot; errors are logged, not raised.

    Args:
        project_id (ObjectId | str): Identifier of the project to refresh.
    """
    log = _get_logger()
    if not mongo:
        log.error("Mongo extension not available in update_project_knowledge.")
        return
    try:
        oid = project_id if isinstance(project_id, ObjectId) else ObjectId(project_id)
        # Slicing is safe on any length, so no explicit length check is needed.
        snapshot = generate_knowledge_base_message(oid)[:1000]
        now = datetime.datetime.now(datetime.timezone.utc)
        mongo.db.projects.update_one(
            {"_id": oid},
            {"$set": {"summary": snapshot, "updatedAt": now}}
        )
    except InvalidId:
        log.error(f"Invalid project_id format passed to update_project_knowledge: {project_id}")
    except AttributeError:
        log.error("PyMongo extension not initialized or available.")
    except Exception as e:
        log.error(f"Error updating project knowledge cache for {project_id}: {e}", exc_info=True)
|
||||
|
||||
|
||||
def process_api_response_and_update_knowledge(api_response, project_id):
    """
    Hook for post-processing an LLM response.

    The response itself is not inspected yet; for now the call simply
    refreshes the project's cached knowledge summary. Future analysis of
    `api_response` (entity extraction, feedback loops, ...) belongs here.
    """
    update_project_knowledge(project_id)
|
||||
# Future enhancements could go here
|
||||
|
||||
|
||||
# --- Vector Store Helpers ---
|
||||
def build_vector_knowledge_base(project_id, query, k=3):
    """
    Builds a vector index (FAISS) from project URL content and retrieves top-k relevant documents.

    NOTE(review): the index is rebuilt from MongoDB on every call (nothing is
    persisted), so each request pays a full embedding pass — acceptable for
    small projects, worth caching if URL counts grow.

    Args:
        project_id (ObjectId | str): The ObjectId of the project.
        query (str): The user query for similarity search.
        k (int): The number of top similar documents to retrieve.

    Returns:
        List[Document]: A list of LangChain Document objects, or an empty list on error/no data.
    """
    logger = _get_logger()
    # Check if necessary components are available (module-level fallbacks set these to None)
    if not mongo or not OpenAIEmbeddings or not FAISS or not Document:
        logger.error("Missing dependencies (Mongo, Langchain) for build_vector_knowledge_base.")
        return []

    try:
        # Ensure project_id is ObjectId
        if not isinstance(project_id, ObjectId):
            project_id = ObjectId(project_id)

        # Fetch URL documents from MongoDB
        urls_cursor = mongo.db.urls.find({"projectId": project_id})
        texts = []
        metadatas = []

        for doc in urls_cursor:
            # Aggregate text content for embedding (same layout as the plain knowledge base)
            keywords_list = [f"{kw.get('word', '')}({kw.get('percentage', 'N/A')}%)" for kw in doc.get("keywords", [])]
            aggregated = (
                f"Title: {doc.get('title', 'N/A')}\n"
                f"URL: {doc.get('url', 'N/A')}\n"
                # f"Starred: {doc.get('starred', False)}\n" # Optionally include more fields
                f"Note: {doc.get('note', 'N/A')}\n"
                f"Keywords: {', '.join(keywords_list) if keywords_list else 'N/A'}\n"
                f"Summary: {doc.get('summary', 'N/A')}"
            )
            texts.append(aggregated)
            # Store relevant metadata alongside the text for source attribution
            metadatas.append({"url": doc.get("url", ""), "title": doc.get("title", ""), "doc_id": str(doc["_id"])})

        # If no text content found, return empty list
        if not texts:
            logger.info(f"No URL text content found for project {project_id} to build vector base.")
            return []

        # Initialize embeddings model (ensure OPENAI_API_KEY is set in environment or config)
        try:
            # Check if OPENAI_API_KEY exists (environment takes precedence over Flask config)
            openai_api_key = os.environ.get("OPENAI_API_KEY") or (current_app.config.get("OPENAI_API_KEY") if has_app_context() else None)
            if not openai_api_key:
                raise ValueError("OPENAI_API_KEY environment variable or Flask config not set.")
            embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
        except Exception as e:
            logger.error(f"Failed to initialize OpenAIEmbeddings: {e}. Check API key.", exc_info=False) # Avoid logging key
            return []

        # Build FAISS vector store from the texts and metadata
        vectorstore = FAISS.from_texts(texts=texts, embedding=embeddings, metadatas=metadatas)

        # Perform similarity search
        docs = vectorstore.similarity_search(query, k=k)
        return docs

    except InvalidId:
        logger.error(f"Invalid project_id format passed to build_vector_knowledge_base: {project_id}")
        return []
    except AttributeError:
        logger.error("PyMongo or Langchain components not initialized or available.")
        return []
    except Exception as e:
        logger.error(f"Error building vector knowledge base for project {project_id}: {e}", exc_info=True)
        return [] # Return empty list on error
|
||||
|
||||
|
||||
def build_vector_based_prompt_with_knowledge(user_message, project_id):
    """
    Constructs a prompt for the LLM, incorporating context from vector search results.

    Args:
        user_message (str): The user's latest message/query.
        project_id (ObjectId): The ObjectId of the project.

    Returns:
        str: The formatted prompt string including retrieved knowledge.
             When vector search yields nothing, a placeholder section is used
             so the prompt structure stays uniform.
    """
    # Retrieve top 3 relevant documents using vector search
    retrieved_docs = build_vector_knowledge_base(project_id, user_message, k=3)

    # Format the retrieved knowledge for inclusion in the prompt
    if retrieved_docs:
        # Join the page_content of each retrieved LangChain Document, prefixed with its source URL
        knowledge_text = "\n\n---\n\n".join([f"Source URL: {doc.metadata.get('url', 'N/A')}\n{doc.page_content}" for doc in retrieved_docs])
    else:
        knowledge_text = "No relevant external knowledge found via vector search for this query."

    # Construct the final prompt with instructions, knowledge, and user query
    # Make prompt more specific about using ONLY the provided knowledge
    prompt = (
        "You are an expert research assistant. Analyze the following retrieved documents, which contain information "
        "(titles, URLs, notes, keywords, summaries) from websites related to the current research project. "
        "Base your response *only* on this provided information and the user's query.\n\n"
        "Common user questions might involve:\n"
        "- Summarizing key topics from the retrieved documents.\n"
        "- Suggesting research directions based *only* on the retrieved documents.\n"
        "- Recommending specific URLs *from the retrieved documents* that are most relevant.\n"
        "- Identifying potentially redundant information *within the retrieved documents*.\n\n"
        "--- Relevant Retrieved Knowledge ---\n"
        f"{knowledge_text}\n"
        "--- End Retrieved Knowledge ---\n\n"
        "User Query:\n"
        f"{user_message}\n\n"
        "Based strictly on the retrieved knowledge and the user query, provide your analysis and recommendations:"
    )
    return prompt
|
||||
|
||||
|
||||
# --- Gemini Message Formatting Helper (Not currently used by send_dialog_message) ---
|
||||
def format_messages_for_gemini(db_messages, max_history=MAX_HISTORY_MESSAGES):
    """
    Convert stored dialog history into the Gemini API 'contents' structure.

    Maps the DB role 'system' to Gemini's 'model' role and everything else to
    'user', drops consecutive messages with the same role (so roles alternate),
    and warns when the trimmed history does not end with a user turn.

    Args:
        db_messages (list[dict]): Messages with 'role' and 'content' keys.
        max_history (int): How many of the most recent messages to keep.

    Returns:
        list[dict]: Gemini-formatted message dicts ({'role', 'parts'}).
    """
    log = _get_logger()
    formatted = []
    previous_role = None

    for message in db_messages[-max_history:]:
        role = "model" if message.get("role") == "system" else "user"
        if role == previous_role:
            # Two consecutive turns from the same role would break alternation.
            log.warning(f"Skipping consecutive message of role '{role}' in formatting.")
            continue
        formatted.append({
            "role": role,
            "parts": [{"text": message.get("content", "")}],
        })
        previous_role = role

    if formatted and formatted[-1]["role"] != "user":
        log.warning("Formatted history for Gemini does not end with a 'user' message.")
    return formatted
|
||||
|
||||
|
||||
##################################################
|
||||
# Dialog API Endpoints
|
||||
##################################################
|
||||
|
||||
# Note: Routes use paths relative to the '/api/dialog' prefix.
|
||||
|
||||
@bp.route('/', methods=['POST']) # Path relative to prefix
@token_required
def create_dialog_session(current_user):
    """
    Creates a new dialog session associated with a project.
    Uses DialogCreateSchema for input validation.
    Expects JSON: { "projectId": "<ObjectId_string>", "sessionId": "<optional_string>", "startMessage": "<optional_string>" }
    Determines the LLM provider based on the user's selected API key.

    Returns:
        201 with {"message", "dialog_id"} on success;
        400 for bad IDs, unsupported/missing provider, or missing key;
        403 if the caller is neither owner nor collaborator of the project;
        404 if the project does not exist;
        422 with schema error messages for invalid JSON payloads;
        500 for configuration or unexpected server errors.
    """
    logger = _get_logger()
    # Validate user identity extracted from the auth token.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in create_dialog_session: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies (these are None when their imports failed at load time).
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not DialogCreateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    schema = DialogCreateSchema()
    try:
        validated_data = schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Create dialog session validation failed: {err.messages}")
        return jsonify(err.messages), 422 # Return validation errors

    # Extract validated data
    project_id_str = validated_data['projectId'] # Already validated as ObjectId string by schema
    session_id = validated_data.get("sessionId", "") # Optional
    start_message = validated_data.get("startMessage", "").strip() # Optional

    try:
        # Convert project ID
        project_obj_id = ObjectId(project_id_str) # Conversion should succeed due to schema validation

        # Find the user's selected API key; exactly one entry is expected to
        # carry selected=True per user (find_one returns the first match).
        db = mongo.db
        selected_api = db.api_list.find_one({"uid": user_id, "selected": True})
        if not selected_api:
            return jsonify({"message": "User has no selected API provider. Please select one in API Keys."}), 400

        provider = selected_api.get("name")
        api_key_exists = bool(selected_api.get("key")) # Check if key value exists

        # Validate provider and key presence
        allowed_providers = ["Gemini", "Deepseek", "Chatgpt"] # Consider from config
        if provider not in allowed_providers:
            return jsonify({"message": f"Selected provider '{provider}' is not supported."}), 400
        if not api_key_exists:
            return jsonify({"message": f"API key value missing for selected provider '{provider}'."}), 400

        # Verify project exists and user has access (owner or collaborator).
        project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1})
        if not project:
            return jsonify({"message": "Project not found."}), 404
        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "Access denied to the specified project."}), 403

        # Prepare initial messages if startMessage exists
        now = datetime.datetime.now(datetime.timezone.utc)
        messages_array = []
        if start_message:
            messages_array.append({
                "role": "user",
                "content": start_message,
                "timestamp": now # Store timestamp for messages
            })

        # Prepare the new dialog document
        dialog_doc = {
            "uid": user_id,
            "projectId": project_obj_id,
            "provider": provider, # Store the provider used for this session
            "sessionStartedAt": now,
            "sessionEndedAt": None, # Mark as null initially
            "messages": messages_array
        }
        # sessionId is stored only when the client supplied a non-empty value.
        if session_id: dialog_doc["sessionId"] = session_id

        # Insert the new dialog session
        result = db.dialog_activity.insert_one(dialog_doc)

        # Return success response with the new dialog ID
        return jsonify({
            "message": "Dialog session created successfully.",
            "dialog_id": str(result.inserted_id)
        }), 201

    except KeyError: # Should be caught by initial user_id check
        logger.error(f"User ID (_id) not found in token payload for create_dialog_session.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error creating dialog session for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "Internal server error creating dialog session."}), 500
|
||||
|
||||
|
||||
@bp.route('/<string:dialog_id>/send', methods=['POST']) # Path relative to prefix
@token_required
def send_dialog_message(current_user, dialog_id):
    """
    Sends a user message within a specific dialog session.
    Uses DialogSendMessageSchema for input validation.
    Expects JSON: { "content": "User's message text" }
    Retrieves context using vector search, builds a prompt, calls the LLM (Gemini),
    and stores the conversation turn in the dialog history.

    Returns:
        200 with {"message", "llmResponse"} when the Gemini call succeeds;
        400 for bad IDs, non-Gemini sessions, or missing API key;
        403/429/503 mapped from the corresponding Gemini API error classes;
        404 if the dialog is missing or not owned by the caller;
        409 if the session has already ended;
        422 with schema error messages for invalid JSON payloads;
        500 for configuration or unexpected server errors.
    """
    logger = _get_logger()
    # Check dependencies (these are None when their imports failed at load time).
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not genai or not google_exceptions: return jsonify({"message": "Gemini API library not available."}), 500
    if not DialogSendMessageSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        # Validate IDs
        user_id_str = str(current_user.get("_id"))
        # NOTE(review): str(None) == "None" is truthy, so a missing _id slips
        # past this guard and is rejected below by the ObjectId conversion.
        if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400
        try:
            user_id = ObjectId(user_id_str)
            dialog_obj_id = ObjectId(dialog_id)
        except InvalidId:
            return jsonify({"message": "Invalid user or dialog ID format."}), 400

        # Get and validate user message content using schema
        json_data = request.get_json() or {}
        schema = DialogSendMessageSchema()
        try:
            validated_data = schema.load(json_data)
        except ValidationError as err:
            logger.warning(f"Send dialog message validation failed: {err.messages}")
            return jsonify(err.messages), 422

        content = validated_data['content'] # Use validated content

        # --- Retrieve Dialog and API Key ---
        # The uid filter doubles as the ownership check.
        db = mongo.db
        dialog_doc = db.dialog_activity.find_one({"_id": dialog_obj_id, "uid": user_id})
        if not dialog_doc: return jsonify({"message": "Dialog session not found or access denied."}), 404
        if dialog_doc.get("sessionEndedAt"): return jsonify({"message": "This dialog session has ended."}), 409 # 409 Conflict

        provider = dialog_doc.get("provider")
        if provider != "Gemini": # This endpoint currently only supports Gemini
            return jsonify({"message": f"This endpoint only supports 'Gemini', but session provider is '{provider}'."}), 400

        # Find the active Gemini API key for the user
        api_doc = db.api_list.find_one({"uid": user_id, "name": "Gemini", "selected": True})
        if not (api_doc and api_doc.get("key")):
            logger.error(f"No valid Gemini key found or selected for user {user_id} during send message.")
            return jsonify({"message": "Gemini API key not configured or selected."}), 400
        gemini_key = api_doc["key"]

        # --- Build Prompt with Vector Knowledge ---
        project_id = dialog_doc.get("projectId")
        if not project_id or not isinstance(project_id, ObjectId):
            logger.error(f"Dialog {dialog_id} is missing valid projectId.")
            return jsonify({"message": "Internal error: Project reference missing."}), 500

        # This builds the prompt incorporating vector search results
        detailed_prompt = build_vector_based_prompt_with_knowledge(content, project_id)

        # Prepare history for Gemini (currently just the detailed prompt as a
        # single user turn; the stored DB history is NOT replayed to the model).
        gemini_history = [{"role": "user", "parts": [{"text": detailed_prompt}]}]

        # --- Call Gemini API ---
        llm_response_text = "[LLM Call Skipped/Failed]" # Default response text
        try:
            # genai.configure is process-global; concurrent users with different
            # keys share this setting — presumably acceptable here, verify.
            genai.configure(api_key=gemini_key)
            model = genai.GenerativeModel(current_app.config["GEMINI_MODEL_NAME"])
            # Consider adding generation_config and safety_settings from Flask config
            llm_response = model.generate_content(gemini_history)

            # Extract text, handling potential blocks or empty responses
            # (.text raises ValueError when the response has no text parts).
            try:
                llm_response_text = llm_response.text
            except ValueError:
                logger.warning(f"Gemini response for dialog {dialog_id} may have been blocked or empty. Feedback: {llm_response.prompt_feedback}")
                llm_response_text = "[Response blocked by safety filters or returned no text content]"

        except google_exceptions.PermissionDenied as ex:
            logger.warning(f"Gemini Permission Denied for user {user_id}: {ex}")
            return jsonify({"message": "Gemini API Error: Invalid API key or insufficient permissions."}), 403
        except google_exceptions.ResourceExhausted as ex:
            logger.warning(f"Gemini Resource Exhausted for user {user_id}: {ex}")
            return jsonify({"message": "Gemini API Error: Rate limit or quota exceeded."}), 429
        except google_exceptions.GoogleAPIError as ex: # Catch other Google API errors
            logger.error(f"Gemini API communication error for user {user_id}: {ex}", exc_info=True)
            return jsonify({"message": "An error occurred while communicating with the Gemini API."}), 503 # 503 Service Unavailable
        except Exception as e: # Catch potential genai configuration errors etc.
            logger.error(f"Unexpected error during Gemini call setup or execution for user {user_id}: {e}", exc_info=True)
            return jsonify({"message": "Internal server error during LLM communication."}), 500


        # --- Process Response and Update DB ---
        now = datetime.datetime.now(datetime.timezone.utc)
        user_msg_entry = {"role": "user", "content": content, "timestamp": now}
        system_msg_entry = {"role": "system", "content": llm_response_text, "timestamp": now} # Use same timestamp for pair

        # Add both messages to the dialog history in MongoDB atomically
        update_res = db.dialog_activity.update_one(
            {"_id": dialog_obj_id},
            {"$push": {"messages": {"$each": [user_msg_entry, system_msg_entry]}}}
        )
        if update_res.modified_count != 1:
            # A failed persist is only logged; the LLM response is still
            # returned to the client below.
            logger.warning(f"Dialog {dialog_id} DB update failed after LLM call (modified_count={update_res.modified_count}).")
            # Decide if this should be an error response to the user

        # Process the response (e.g., update cached knowledge)
        process_api_response_and_update_knowledge(llm_response_text, project_id)

        # Return the LLM's response text to the client
        return jsonify({"message": "LLM response received.", "llmResponse": llm_response_text}), 200

    except KeyError: # Should be caught by initial user_id check
        logger.error(f"User ID (_id) not found in token payload for send_dialog_message.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo or other extension not initialized correctly.")
        return jsonify({"message": "Server configuration error."}), 500
    except Exception as e:
        logger.error(f"Unexpected error in send_dialog_message for dialog {dialog_id}: {e}", exc_info=True)
        return jsonify({"message": "Internal server error processing message."}), 500
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET']) # Path relative to prefix
@token_required
def list_dialog_sessions(current_user):
    """
    Lists dialog sessions for the authenticated user.
    Uses DialogSummarySchema for output serialization.
    Supports filtering by 'projectId' query parameter.
    Excludes the 'messages' array for brevity.

    Returns:
        200 with {"dialogs": [...]} sorted by sessionStartedAt descending;
        400 for an invalid user or projectId format;
        500 for configuration or unexpected server errors.
    """
    logger = _get_logger()
    # Check dependencies (these are None when their imports failed at load time).
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not DialogSummarySchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        # Validate user ID
        user_id_str = str(current_user.get("_id"))
        if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400
        try:
            user_id = ObjectId(user_id_str)
        except InvalidId:
            return jsonify({"message": "Invalid user ID format in token."}), 400

        # Base query for the user's dialogs
        query = {"uid": user_id}

        # Add projectId filter if provided in query parameters
        project_id_str = request.args.get("projectId")
        if project_id_str:
            try:
                project_obj_id = ObjectId(project_id_str)
                query["projectId"] = project_obj_id
            except InvalidId:
                return jsonify({"message": "Invalid projectId format in query parameter."}), 400

        # Fetch dialogs, excluding the messages field, sort by start time descending
        db = mongo.db
        cursor = db.dialog_activity.find(
            query,
            {"messages": 0} # Projection to exclude messages
        ).sort("sessionStartedAt", -1)

        # NOTE(review): no pagination — the whole result set is materialized.
        dialog_docs = list(cursor) # Convert cursor to list

        # --- Serialize results using the schema ---
        output_schema = DialogSummarySchema(many=True)
        # Schema handles ObjectId and datetime conversion, and field exclusion
        serialized_result = output_schema.dump(dialog_docs)

        return jsonify({"dialogs": serialized_result}), 200

    except KeyError: # Should be caught by initial user_id check
        logger.error(f"User ID (_id) not found in token payload for list_dialog_sessions.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error listing dialogs for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "Internal server error listing dialog sessions."}), 500
|
||||
|
||||
|
||||
@bp.route('/<string:dialog_id>', methods=['GET']) # Path relative to prefix
@token_required
def get_dialog_session(current_user, dialog_id):
    """
    Return the full document for one dialog session, messages included.

    Ownership is enforced by querying on both the dialog id and the caller's
    user id; the response body is produced by DialogSchema.
    """
    logger = _get_logger()

    # Bail out early when required server-side pieces are missing.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not DialogSchema:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        raw_uid = str(current_user.get("_id"))
        # NOTE(review): str(None) == "None" is truthy, so this guard only
        # catches an empty string; a missing _id is rejected by ObjectId below.
        if not raw_uid:
            return jsonify({"message": "Missing user ID in token."}), 400
        try:
            owner_oid = ObjectId(raw_uid)
            session_oid = ObjectId(dialog_id)
        except InvalidId:
            return jsonify({"message": "Invalid user or dialog ID format."}), 400

        # A single query both locates the dialog and verifies ownership.
        record = mongo.db.dialog_activity.find_one({"_id": session_oid, "uid": owner_oid})
        if not record:
            return jsonify({"message": "Dialog session not found or access denied."}), 404

        # DialogSchema converts ObjectIds, datetimes and nested messages.
        payload = DialogSchema().dump(record)
        return jsonify(payload), 200

    except KeyError:  # Defensive: the user id was already checked above.
        logger.error(f"User ID (_id) not found in token payload for get_dialog_session.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error retrieving dialog {dialog_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "Internal server error retrieving dialog session."}), 500
|
||||
|
||||
|
||||
@bp.route('/<string:dialog_id>/end', methods=['PUT']) # Path relative to prefix
@token_required
def end_dialog_session(current_user, dialog_id):
    """
    Marks a dialog session as ended by setting the 'sessionEndedAt' timestamp.
    Prevents ending an already ended session. Verifies ownership.
    (No schema needed for input/output here)

    Returns:
        200 on success;
        404 if the dialog is missing, not owned by the caller, or vanished
            between the pre-check and the update;
        409 if the session is already ended (including the race case);
        400/401/500 for validation, auth, or server errors.
    """
    logger = _get_logger()
    if not mongo: return jsonify({"message": "Database connection not available."}), 500

    try:
        # Validate IDs
        user_id_str = str(current_user.get("_id"))
        if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400
        try:
            user_id = ObjectId(user_id_str)
            dial_obj_id = ObjectId(dialog_id)
        except InvalidId:
            return jsonify({"message": "Invalid user or dialog ID format."}), 400

        # Check if session exists, belongs to user, and is not already ended
        # (pre-check gives a clean 404/409 before the atomic update below).
        db = mongo.db
        existing_doc = db.dialog_activity.find_one({"_id": dial_obj_id, "uid": user_id}, {"sessionEndedAt": 1})
        if not existing_doc:
            return jsonify({"message": "Dialog session not found or access denied."}), 404
        if existing_doc.get("sessionEndedAt") is not None:
            # 409 Conflict - the session is already in the 'ended' state
            return jsonify({"message": "Dialog session has already been ended."}), 409

        # Update the document to set the end time
        now = datetime.datetime.now(datetime.timezone.utc)
        result = db.dialog_activity.update_one(
            {"_id": dial_obj_id, "uid": user_id, "sessionEndedAt": None}, # Ensure it's not already ended atomically
            {"$set": {"sessionEndedAt": now}}
        )

        # Check if the update was successful
        if result.modified_count == 1:
            return jsonify({"message": "Dialog session marked as ended."}), 200
        elif result.matched_count == 1 and result.modified_count == 0:
            # This could happen if the session was ended between find_one and update_one (race condition)
            logger.warning(f"Dialog {dialog_id} was already ended before update (race condition?).")
            return jsonify({"message": "Dialog session was already ended."}), 409
        else: # matched_count == 0 (shouldn't happen if find_one worked unless deleted concurrently)
            logger.warning(f"Dialog {dialog_id} matched 0 for ending update.")
            return jsonify({"message": "Dialog session not found or already ended."}), 404


    except KeyError: # Should be caught by initial user_id check
        logger.error(f"User ID (_id) not found in token payload for end_dialog_session.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error ending dialog {dialog_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "Internal server error ending dialog session."}), 500
|
||||
|
||||
|
||||
@bp.route('/<string:dialog_id>', methods=['DELETE']) # Path relative to prefix
@token_required
def delete_dialog_session(current_user, dialog_id):
    """
    Permanently remove a dialog session document.

    The delete filter includes the caller's user id, so only the owner can
    remove a session; any other outcome is reported as 404.
    """
    logger = _get_logger()
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500

    try:
        uid_text = str(current_user.get("_id"))
        if not uid_text:
            return jsonify({"message": "Missing user ID in token."}), 400
        try:
            caller_oid = ObjectId(uid_text)
            target_oid = ObjectId(dialog_id)
        except InvalidId:
            return jsonify({"message": "Invalid user or dialog ID format."}), 400

        # Ownership is enforced by the filter itself: a foreign dialog simply
        # matches nothing and falls through to the 404 branch.
        outcome = mongo.db.dialog_activity.delete_one({"_id": target_oid, "uid": caller_oid})

        if outcome.deleted_count == 1:
            return jsonify({"message": "Dialog session deleted successfully."}), 200 # 200 OK or 204 No Content
        # Nothing deleted: the document is missing or not owned by this user.
        return jsonify({"message": "Dialog session not found or access denied."}), 404

    except KeyError:  # Defensive: user id presence was checked above.
        logger.error(f"User ID (_id) not found in token payload for delete_dialog_session.")
        return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401
    except AttributeError:
        logger.error("PyMongo extension not initialized or attached correctly.")
        return jsonify({"message": "Database configuration error."}), 500
    except Exception as e:
        logger.error(f"Error deleting dialog {dialog_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True)
        return jsonify({"message": "Internal server error deleting dialog session."}), 500
|
||||
|
||||
24
backend_flask/myapp/extensions.py
Normal file
24
backend_flask/myapp/extensions.py
Normal file
@ -0,0 +1,24 @@
|
||||
# backend/myapp/extensions.py
|
||||
|
||||
"""
|
||||
Central place to instantiate Flask extension objects.
|
||||
These objects are initialized with the app instance later in the application factory.
|
||||
"""
|
||||
|
||||
from flask_pymongo import PyMongo
|
||||
from flask_cors import CORS
|
||||
|
||||
# from flask_jwt_extended import JWTManager
|
||||
from flask_marshmallow import Marshmallow
|
||||
|
||||
# Add other necessary extension imports (e.g., Migrate if using SQL + Alembic)
|
||||
|
||||
# Instantiate extensions without the app object
|
||||
mongo = PyMongo()
|
||||
cors = CORS()
|
||||
# jwt = JWTManager()
|
||||
ma = Marshmallow()
|
||||
# migrate = Migrate() # Example if using Flask-Migrate
|
||||
|
||||
# You can add other globally used utilities here if needed,
|
||||
# but primarily focus on Flask extensions.
|
||||
4
backend_flask/myapp/models.py
Normal file
4
backend_flask/myapp/models.py
Normal file
@ -0,0 +1,4 @@
|
||||
from flask_pymongo import PyMongo

# Create a global PyMongo instance.
# NOTE(review): myapp/extensions.py also instantiates its own `mongo = PyMongo()`;
# keeping two instances risks one never being bound to the Flask app. Confirm
# which instance the application factory calls init_app() on, and consider
# importing `mongo` from extensions here instead of creating a second one.
mongo = PyMongo()
|
||||
13
backend_flask/myapp/projects/__init__.py
Normal file
13
backend_flask/myapp/projects/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
# myapp/projects/__init__.py
|
||||
|
||||
from flask import Blueprint
|
||||
|
||||
# Define the Blueprint instance for the projects module.
|
||||
# 'projects' is the unique name for this blueprint.
|
||||
# url_prefix='/api/projects' will be prepended to all routes defined in this blueprint.
|
||||
bp = Blueprint('projects', __name__, url_prefix='/api/projects')
|
||||
|
||||
# Import the routes module.
|
||||
# This connects the routes defined in routes.py to the 'bp' instance.
|
||||
# This import MUST come AFTER the Blueprint 'bp' is defined.
|
||||
from . import projects_routes
|
||||
715
backend_flask/myapp/projects/projects_routes.py
Normal file
715
backend_flask/myapp/projects/projects_routes.py
Normal file
@ -0,0 +1,715 @@
|
||||
# myapp/projects/projects_routes.py
|
||||
|
||||
import datetime
|
||||
import os # Needed for checking environment variables (e.g., for OpenAI key)
|
||||
import logging
|
||||
from flask import request, jsonify, current_app, has_app_context # Flask utilities
|
||||
from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds
|
||||
from collections import defaultdict # May be used in helper logic
|
||||
from functools import wraps # Import wraps for dummy decorator
|
||||
|
||||
|
||||
# --- Local Blueprint Import ---
|
||||
from . import bp # Import the 'bp' instance defined in the local __init__.py
|
||||
|
||||
# --- Shared Extensions and Utilities Imports ---
|
||||
try:
|
||||
from ..extensions import mongo # Import the initialized PyMongo instance
|
||||
# Import utilities from the parent 'myapp/utils.py'
|
||||
from ..utils import token_required, generate_passkey
|
||||
except ImportError:
|
||||
# Fallback or error handling if imports fail
|
||||
print("Warning: Could not import mongo, token_required, or generate_passkey in projects/projects_routes.py.")
|
||||
mongo = None
|
||||
generate_passkey = lambda: "error_generating_passkey" # Dummy function
|
||||
# Define a dummy decorator if token_required is missing
|
||||
def token_required(f):
|
||||
@wraps(f)
|
||||
def wrapper(*args, **kwargs):
|
||||
print("ERROR: token_required decorator is not available!")
|
||||
return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500
|
||||
return wrapper
|
||||
|
||||
# --- Schema Imports ---
|
||||
try:
|
||||
# Import the relevant schemas defined in schemas.py
|
||||
from ..schemas import (
|
||||
ProjectCreateSchema, ProjectUpdateSchema, ProjectSchema,
|
||||
ProjectListSchema # Use ProjectListSchema for the list endpoint
|
||||
)
|
||||
from marshmallow import ValidationError
|
||||
except ImportError:
|
||||
print("Warning: Could not import Project schemas or ValidationError in projects/projects_routes.py.")
|
||||
ProjectCreateSchema = None
|
||||
ProjectUpdateSchema = None
|
||||
ProjectSchema = None
|
||||
ProjectListSchema = None
|
||||
ValidationError = None
|
||||
|
||||
# --- Celery Task Import ---
|
||||
# IMPORTANT: Assumes the project root directory ('your_fullstack_project/') is in PYTHONPATH
|
||||
try:
|
||||
from backend_flask.celery_worker.celery_app import async_recalc_project_keywords
|
||||
except ModuleNotFoundError:
|
||||
print("Warning: Could not import 'async_recalc_project_keywords' from 'celery_worker'. Ensure project root is in PYTHONPATH.")
|
||||
# Define a dummy task function to prevent NameError if Celery isn't set up
|
||||
def _dummy_celery_task(*args, **kwargs):
|
||||
task_name = args[0] if args else 'dummy_task'
|
||||
print(f"ERROR: Celery task {task_name} not available!")
|
||||
class DummyTask:
|
||||
def __init__(self, name):
|
||||
self.__name__ = name
|
||||
def delay(self, *a, **kw):
|
||||
print(f"ERROR: Tried to call delay() on dummy task {self.__name__}")
|
||||
pass
|
||||
return DummyTask(task_name)
|
||||
async_recalc_project_keywords = _dummy_celery_task('async_recalc_project_keywords')
|
||||
|
||||
|
||||
# --- Dialog Helper Import ---
|
||||
# Import the helper function from the sibling 'dialog' blueprint's routes module
|
||||
try:
|
||||
# Assumes the function is defined in myapp/dialog/dialog_routes.py
|
||||
from ..dialog.dialog_routes import generate_knowledge_base_message
|
||||
except ImportError:
|
||||
print("Warning: Could not import 'generate_knowledge_base_message' from dialog blueprint.")
|
||||
# Define a dummy function
|
||||
generate_knowledge_base_message = lambda pid: "Error: Knowledge base function not available."
|
||||
|
||||
# --- External Lib Imports (for summarize_project) ---
|
||||
# Import conditionally to avoid errors if not installed
|
||||
try:
|
||||
import google.generativeai as genai
|
||||
from google.api_core import exceptions as google_exceptions
|
||||
except ImportError:
|
||||
print("Warning: google.generativeai not installed. Project summarization will fail.")
|
||||
genai = None
|
||||
google_exceptions = None
|
||||
|
||||
# --- Helper to get logger safely ---
|
||||
def _get_logger():
    """Return the current Flask app's logger when inside an app context,
    otherwise fall back to a plain module-level logger."""
    return current_app.logger if has_app_context() else logging.getLogger(__name__)
|
||||
|
||||
# Note: Routes use paths relative to the '/api/projects' prefix defined in __init__.py.
|
||||
|
||||
@bp.route('/', methods=['POST']) # Path relative to prefix
@token_required
def create_project(current_user):
    """
    Create a new project for the authenticated user.
    Uses ProjectCreateSchema for input validation.
    Expects 'name' and optional 'topic', 'description' in JSON payload.
    Generates a unique passkey for the project.

    Returns:
        201 with {"message", "project_id", "passkey"} on success;
        400 for an invalid user ID in the token;
        422 with schema error messages for invalid JSON payloads;
        500 for configuration or unexpected server errors.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        logger.error("Invalid current_user object received in create_project")
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in create_project: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies (these are None when their imports failed at load time).
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not ProjectCreateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    schema = ProjectCreateSchema()
    try:
        validated_data = schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Create project validation failed: {err.messages}")
        return jsonify(err.messages), 422 # Return validation errors

    # Extract validated data
    name = validated_data['name'] # Required field
    topic = validated_data.get('topic', "") # Optional field from schema
    description = validated_data.get('description', "") # Optional field from schema

    try:
        # Generate a passkey for potential sharing/joining later
        passkey = generate_passkey()
        db = mongo.db # Use imported mongo instance

        # Prepare project document data
        now = datetime.datetime.now(datetime.timezone.utc)
        project_data = {
            "ownerId": user_id,
            "collaborators": [], # Initially empty collaborator list
            # NOTE(review): the raw passkey is stored as-is in the document —
            # confirm it is meant to be a plaintext shared secret.
            "passkey": passkey, # Store the generated passkey
            "name": name.strip(), # Use validated and trimmed name
            "topic": topic,
            "description": description,
            "summary": "", # Initial empty summary
            "keywords": [], # Initial empty keywords
            "lastActivityBy": user_id, # Owner is the last active initially
            "createdAt": now,
            "updatedAt": now
        }

        # Insert the new project document
        result = db.projects.insert_one(project_data)
        project_id = str(result.inserted_id)

        # Return success response with project ID and passkey
        return jsonify({
            "message": "Project created successfully.",
            "project_id": project_id,
            "passkey": passkey # Return passkey so owner knows it
        }), 201 # 201 Created status code

    except Exception as e:
        # Log the detailed error for debugging
        logger.error(f"Error creating project for user {user_id}: {e}", exc_info=True)
        # Return a generic error message to the client
        return jsonify({"message": "An error occurred while creating the project."}), 500
|
||||
|
||||
|
||||
@bp.route('/', methods=['GET'])  # Path relative to prefix
@token_required
def get_projects(current_user):
    """
    List the projects visible to the authenticated user.

    Returns a summary (id, name, updatedAt) of every project the user owns
    or collaborates on, serialized with ProjectListSchema and sorted by the
    most recent update first.
    """
    logger = _get_logger()

    # Guard against a malformed token payload.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as exc:
        logger.error(f"User ID conversion error in get_projects: {exc}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Fail fast when required services are unavailable.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not ProjectListSchema:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        db = mongo.db
        # Match projects where the user is the owner OR appears in the
        # collaborators array; project only the fields the list schema
        # serializes, newest update first.
        query = {"$or": [{"ownerId": user_id}, {"collaborators": user_id}]}
        projection = {"name": 1, "updatedAt": 1, "_id": 1}
        cursor = db.projects.find(query, projection).sort("updatedAt", -1)
        docs = list(cursor)

        # The schema handles ObjectId and datetime conversion.
        payload = ProjectListSchema(many=True).dump(docs)
        return jsonify({"projects": payload}), 200

    except Exception as exc:
        logger.error(f"Error fetching projects for user {user_id}: {exc}", exc_info=True)
        return jsonify({"message": "An error occurred while fetching projects."}), 500
|
||||
|
||||
@bp.route('/<string:project_id>', methods=['GET'])  # Path relative to prefix
@token_required
def get_project_detail(current_user, project_id):
    """
    Retrieve detailed information for a specific project by its ID.

    Args:
        current_user: Token payload injected by @token_required; must carry "_id".
        project_id: Project ObjectId as a URL path string.

    Returns:
        200 with the ProjectSchema-serialized document on success;
        400 for malformed IDs; 403 when the caller is neither owner nor
        collaborator; 404 when the project does not exist; 500 on errors.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in get_project_detail: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not ProjectSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        db = mongo.db
        # Validate the provided project ID format
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format."}), 400

        # Find the project by ID
        project_doc = db.projects.find_one({"_id": obj_project_id})
        if not project_doc:
            return jsonify({"message": "Project not found."}), 404  # 404 Not Found

        # Verify ownership or collaboration status for access control
        owner_id = project_doc.get("ownerId")
        collaborators = project_doc.get("collaborators", [])
        if not owner_id:  # Check for data integrity
            logger.error(f"Project {project_id} is missing ownerId.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "Access denied to this project."}), 403  # 403 Forbidden

        # --- Serialize results using the schema ---
        output_schema = ProjectSchema()
        # Schema handles ObjectId, datetime, nested keywords, and field selection
        serialized_result = output_schema.dump(project_doc)

        return jsonify(serialized_result), 200

    except Exception as e:
        logger.error(f"Error fetching project detail for {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while fetching project details."}), 500
|
||||
|
||||
@bp.route('/<string:project_id>', methods=['PUT'])  # Path relative to prefix
@token_required
def update_project(current_user, project_id):
    """
    Update details of an existing project.

    Input is validated with ProjectUpdateSchema; only name, collaborators,
    topic, description and keywords may be changed. Only the project owner
    may update. On success the full updated document is re-fetched and
    returned, serialized with ProjectSchema.

    Returns:
        200 with the updated project; 400 for malformed IDs or an empty
        update; 403 when the caller is not the owner; 404 when the project
        is missing; 422 on schema validation failure; 500 on errors.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in update_project: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not ProjectUpdateSchema or not ProjectSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    schema = ProjectUpdateSchema()
    try:
        # Load validates allowed fields and their types (like collaborators list of strings)
        validated_data = schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Update project validation failed: {err.messages}")
        return jsonify(err.messages), 422

    # If validation passed but no valid fields were provided
    if not validated_data:
        return jsonify({"message": "No valid fields provided for update."}), 400

    try:
        db = mongo.db
        # Validate project ID format
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format."}), 400

        # Find the project
        project = db.projects.find_one({"_id": obj_project_id}, {"ownerId": 1})  # Fetch ownerId for check
        if not project:
            return jsonify({"message": "Project not found."}), 404

        # Verify ownership for update permission
        owner_id = project.get("ownerId")
        if not owner_id:
            logger.error(f"Project {project_id} is missing ownerId during update.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id:
            return jsonify({"message": "Only the project owner can update this project."}), 403

        # --- Prepare Update Fields based on validated data ---
        update_fields = {}
        # Convert collaborator strings back to ObjectIds if present
        if "collaborators" in validated_data:
            try:
                update_fields["collaborators"] = [ObjectId(cid) for cid in validated_data["collaborators"]]
                # NOTE(review): collaborator IDs are not checked for existence
                # here, and the owner could be listed as their own collaborator
                # — consider validating against the users collection.
            except (InvalidId, TypeError):
                # This should ideally be caught by schema validation if using _validate_object_id
                return jsonify({"message": "Invalid collaborator ID format received."}), 400
        # Copy other validated fields directly
        for field in ["name", "topic", "description", "keywords"]:
            if field in validated_data:
                update_fields[field] = validated_data[field]

        # Always update the 'updatedAt' timestamp
        update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc)
        # Note: lastActivityBy is NOT updated here.

        # Perform the update operation
        result = db.projects.update_one({"_id": obj_project_id}, {"$set": update_fields})

        # Check if the update was successful
        if result.matched_count == 1:
            # Retrieve the updated project document to return it
            updated_project_doc = db.projects.find_one({"_id": obj_project_id})
            if updated_project_doc:
                # Serialize the updated document using the detail schema
                output_schema = ProjectSchema()
                serialized_project = output_schema.dump(updated_project_doc)
                return jsonify({"message": "Project updated successfully.", "project": serialized_project}), 200
            else:
                # Document vanished between update and re-fetch; still report success.
                logger.warning(f"Project {project_id} updated but could not be retrieved.")
                return jsonify({"message": "Project updated successfully, but failed to retrieve updated data."}), 200
        else:
            # Matched count was 0 (deleted between the find above and the update)
            return jsonify({"message": "Project update failed (document not found)."}), 404

    except Exception as e:
        logger.error(f"Error updating project {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while updating the project."}), 500
|
||||
|
||||
@bp.route('/<string:project_id>', methods=['DELETE'])  # Path relative to prefix
@token_required
def delete_project(current_user, project_id):
    """
    Delete a project and cascade deletion of associated URLs, activity logs,
    and dialog sessions. Only the project owner may delete.

    Returns:
        200 on success; 400 for a malformed ID; 403 when the caller is not
        the owner; 404 when the project is missing; 500 on errors.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in delete_project: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo: return jsonify({"message": "Database connection not available."}), 500

    try:
        db = mongo.db
        # Validate project ID format
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format."}), 400

        # Find the project
        project = db.projects.find_one({"_id": obj_project_id}, {"ownerId": 1})
        if not project:
            return jsonify({"message": "Project not found."}), 404

        # Verify ownership for delete permission
        owner_id = project.get("ownerId")
        if not owner_id:
            logger.error(f"Project {project_id} is missing ownerId during delete.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id:
            return jsonify({"message": "Only the project owner can delete this project."}), 403

        # --- Perform Deletions (Consider Transactions if available/needed) ---
        # NOTE(review): these deletes are not atomic — a failure after the
        # project delete but before the cascades leaves orphaned URL/activity/
        # dialog documents. A multi-document transaction (replica set) or
        # deleting children first would make this safer.

        # 1. Delete the project document itself
        delete_project_result = db.projects.delete_one({"_id": obj_project_id})
        if delete_project_result.deleted_count == 0:
            # Raced with another delete between find_one and delete_one.
            logger.warning(f"Project {project_id} found but delete_one removed 0 documents.")
            return jsonify({"message": "Project deletion failed (already deleted?)."}), 404

        # 2. Cascade delete associated URLs
        delete_urls_result = db.urls.delete_many({"projectId": obj_project_id})
        logger.info(f"Deleted {delete_urls_result.deleted_count} URLs for project {project_id}")

        # 3. Cascade delete associated activity logs
        delete_activity_result = db.project_activity.delete_many({"projectId": obj_project_id})
        logger.info(f"Deleted {delete_activity_result.deleted_count} activity logs for project {project_id}")

        # 4. Cascade delete associated dialog sessions
        delete_dialog_result = db.dialog_activity.delete_many({"projectId": obj_project_id})
        logger.info(f"Deleted {delete_dialog_result.deleted_count} dialog sessions for project {project_id}")

        # --- End Deletions ---

        return jsonify({"message": "Project and associated data deleted successfully."}), 200  # 200 OK or 204 No Content

    except Exception as e:
        logger.error(f"Error deleting project {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while deleting the project."}), 500
|
||||
|
||||
@bp.route('/<string:project_id>/info', methods=['GET'])  # Path relative to prefix
@token_required
def get_project_info(current_user, project_id):
    """
    Return the informational fields of a project.

    Output serialization is delegated to ProjectSchema, which decides the
    field set. The caller must be the project owner or a collaborator.
    """
    logger = _get_logger()

    # Guard against a malformed token payload.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as exc:
        logger.error(f"User ID conversion error in get_project_info: {exc}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Fail fast when required services are unavailable.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not ProjectSchema:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        db = mongo.db
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format."}), 400

        # Fetch the full document; ProjectSchema performs field selection on dump.
        project_doc = db.projects.find_one({"_id": obj_project_id})
        if not project_doc:
            return jsonify({"message": "Project not found."}), 404

        # Access control: owner or listed collaborator only.
        owner_id = project_doc.get("ownerId")
        if not owner_id:
            logger.error(f"Project {project_id} is missing ownerId in get_project_info.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in project_doc.get("collaborators", []):
            return jsonify({"message": "Access denied to this project's info."}), 403

        # ProjectSchema includes more than just the 'info' fields; introduce a
        # dedicated ProjectInfoSchema if a narrower payload is ever required.
        serialized = ProjectSchema().dump(project_doc)
        return jsonify(serialized), 200

    except Exception as exc:
        logger.error(f"Error getting project info for {project_id}: {exc}", exc_info=True)
        return jsonify({"message": "An error occurred while retrieving project info."}), 500
|
||||
|
||||
@bp.route('/<string:project_id>/recalc_keywords', methods=['PUT'])  # Path relative to prefix
@token_required
def recalc_project_keywords(current_user, project_id):
    """
    Queue an asynchronous Celery task to recalculate project keywords.

    Access is allowed for the owner or any collaborator. Responds 202 as
    soon as the task is queued; the recalculation happens in the worker.

    Returns:
        202 when queued; 400 for malformed IDs; 403 on access denial;
        404 when the project is missing; 500 on configuration or errors.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
        user_id_str = str(user_id)  # Keep string version for Celery task if needed
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in recalc_project_keywords: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    if not mongo: return jsonify({"message": "Database connection not available."}), 500

    try:
        db = mongo.db
        # Validate project ID format
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format."}), 400

        # Verify project exists and user has access before queueing task
        project = db.projects.find_one(
            {"_id": obj_project_id},
            {"ownerId": 1, "collaborators": 1}  # Only fetch fields needed for access check
        )
        if not project:
            return jsonify({"message": "Project not found."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_id} is missing ownerId in recalc_keywords.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "Access denied to trigger keyword recalculation for this project."}), 403

        # --- Queue the Celery Task ---
        try:
            # Call the .delay() method on the imported Celery task
            task_result = async_recalc_project_keywords.delay(project_id, user_id_str)
            logger.info(f"Queued keyword recalc task {task_result.id} for project {project_id}")
            # Return 202 Accepted status code to indicate task was queued
            return jsonify({"message": "Project keywords recalculation task queued successfully."}), 202
        except NameError:
            # The task import failed at module load time; degrade gracefully.
            logger.error("Celery task 'async_recalc_project_keywords' is not defined or imported correctly.")
            return jsonify({"message": "Server configuration error: Keyword recalculation feature unavailable."}), 500
        except Exception as e:
            # Catch errors related to Celery connection or queueing
            logger.error(f"Error queueing recalc keywords task for project {project_id}: {e}", exc_info=True)
            return jsonify({"message": "An error occurred while queueing the keywords recalculation task."}), 500

    except Exception as e:
        # Catch general errors before task queueing
        logger.error(f"Error in recalc_project_keywords endpoint for project {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An internal error occurred before queueing the task."}), 500
|
||||
|
||||
@bp.route('/<string:project_id>/summarize', methods=['PUT'])  # Path relative to prefix
@token_required
def summarize_project(current_user, project_id):
    """
    Generate and persist an LLM summary for a project.

    Builds a knowledge-base prompt from the project's URLs (via
    generate_knowledge_base_message), sends it to Gemini using the user's
    selected Gemini API key, and stores the result in the project's
    "summary" field. The caller must be the owner or a collaborator.

    Returns:
        200 with the summary; 400 for malformed IDs or missing API key;
        403 on access denial or a rejected key; 404 when the project is
        missing; 429 on quota exhaustion; 503 on API communication errors;
        500 otherwise.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in summarize_project: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not genai or not google_exceptions: return jsonify({"message": "Gemini API library not available."}), 500

    try:
        db = mongo.db
        # Validate project ID format
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format."}), 400

        # Verify project exists and user has access
        project = db.projects.find_one(
            {"_id": obj_project_id},
            {"ownerId": 1, "collaborators": 1}  # Only fetch fields needed for access check
        )
        if not project:
            return jsonify({"message": "Project not found."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_id} is missing ownerId in summarize_project.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "Access denied to summarize this project."}), 403

        # --- Check for User's Gemini API Key ---
        api_doc = db.api_list.find_one({"uid": user_id, "selected": True, "name": "Gemini"})
        if not (api_doc and api_doc.get("key")):
            return jsonify({"message": "Summarization requires a selected Gemini API key. Please configure it in API Keys."}), 400  # 400 Bad Request - missing prereq
        gemini_key = api_doc.get("key")

        # --- Generate Knowledge Base and Prompt ---
        # Use the imported helper function from the dialog blueprint
        kb_message = generate_knowledge_base_message(obj_project_id)  # Pass ObjectId
        if not kb_message or kb_message.startswith("Error:") :  # Handle error from helper
            logger.warning(f"Knowledge base generation failed or was empty for project {project_id}. KB: {kb_message}")
            kb_message = "No external knowledge base content available for this project."  # Fallback

        # Construct the prompt for Gemini
        prompt = (
            f"You are an expert research assistant tasked with summarizing a project. "
            f"Below is the external knowledge base compiled from websites associated with this project.\n\n"
            f"--- External Knowledge Base ---\n{kb_message}\n--- End Knowledge Base ---\n\n"
            f"Based ONLY on the provided knowledge base (do not use external information), please generate a concise and comprehensive summary "
            f"of the project's main focus, key topics, and potential research directions. Aim for approximately 300 words, maximum 400 words."
        )

        # --- Call Gemini API ---
        # NOTE(review): this default is effectively unreachable — every path
        # inside the try below either sets summary_text or returns early.
        summary_text = "[Summary generation failed]"  # Default
        try:
            # NOTE(review): genai.configure mutates global library state per
            # request; concurrent requests with different keys may interleave.
            genai.configure(api_key=gemini_key)
            # Use the constant defined earlier or get from config
            model = genai.GenerativeModel(current_app.config["GEMINI_MODEL_NAME"])
            gemini_input = [{"role": "user", "parts": [{"text": prompt}]}]
            # Consider adding safety settings if needed
            llm_response = model.generate_content(gemini_input)
            # Extract text; .text raises ValueError when the response was blocked
            try:
                summary_text = llm_response.text
            except ValueError:
                logger.warning(f"Gemini response for project {project_id} summary may have been blocked. Feedback: {llm_response.prompt_feedback}")
                summary_text = "[Summary generation blocked or failed]"

        except google_exceptions.PermissionDenied as ex:
            logger.warning(f"Gemini Permission Denied for user {user_id} during summarization: {ex}")
            return jsonify({"message": "Gemini API Error: Invalid API key or insufficient permissions."}), 403
        except google_exceptions.ResourceExhausted as ex:
            logger.warning(f"Gemini Resource Exhausted for user {user_id} during summarization: {ex}")
            return jsonify({"message": "Gemini API Error: Rate limit or quota exceeded."}), 429
        except google_exceptions.GoogleAPIError as ex:
            logger.error(f"Gemini API communication error during summarization for project {project_id}: {ex}", exc_info=True)
            return jsonify({"message": "An error occurred while communicating with the Gemini API."}), 503
        except Exception as e:
            logger.error(f"Unexpected error during Gemini call setup/execution for project {project_id} summary: {e}", exc_info=True)
            return jsonify({"message": "Internal server error during LLM communication."}), 500

        # Check if the summary is empty after potential blocking
        if not summary_text or summary_text == "[Summary generation blocked or failed]":
            return jsonify({"message": "Failed to generate summary (LLM returned empty or blocked response)."}), 500

        # --- Update Project Summary in DB ---
        try:
            update_result = db.projects.update_one(
                {"_id": obj_project_id},
                {"$set": {"summary": summary_text, "updatedAt": datetime.datetime.now(datetime.timezone.utc)}}
            )
            if update_result.matched_count == 0:
                # Project deleted between find and update?
                logger.warning(f"Project {project_id} not found during summary update.")
                return jsonify({"message": "Project not found while saving summary."}), 404

            # Return success response with the generated summary
            return jsonify({"message": "Project summary generated and saved successfully.", "summary": summary_text}), 200

        except Exception as e:
            logger.error(f"Error updating project summary in DB for {project_id}: {e}", exc_info=True)
            # Inform user summary was generated but not saved
            return jsonify({"message": "Summary generated but failed to save to project.", "summary": summary_text}), 500

    except Exception as e:
        # Catch-all for errors before API call or DB update
        logger.error(f"Error in summarize_project endpoint for project {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An internal error occurred during project summarization."}), 500
||||
|
||||
227
backend_flask/myapp/schemas.py
Normal file
227
backend_flask/myapp/schemas.py
Normal file
@ -0,0 +1,227 @@
|
||||
# myapp/schemas.py
|
||||
|
||||
from marshmallow import fields, validate, ValidationError, Schema, validates_schema
|
||||
from marshmallow.validate import OneOf
|
||||
from bson.objectid import ObjectId, InvalidId
|
||||
|
||||
# Import Marshmallow instance from extensions
|
||||
# Assumes 'ma = Marshmallow()' is defined in myapp/extensions.py
|
||||
# and initialized in myapp/__init__.py's create_app()
|
||||
try:
|
||||
from .extensions import ma
|
||||
except ImportError:
|
||||
# Basic fallback if extensions.py or 'ma' instance is missing
|
||||
print("WARNING: Flask-Marshmallow instance 'ma' not found in extensions. Falling back.")
|
||||
from flask_marshmallow import Marshmallow
|
||||
ma = Marshmallow()
|
||||
|
||||
# --- Custom Validators (Optional but useful) ---
|
||||
|
||||
def _validate_object_id(value):
    """Marshmallow validator: accept only values that parse as a BSON ObjectId."""
    try:
        ObjectId(value)
    except (InvalidId, TypeError, ValueError):  # any parse failure
        # Normalize every failure mode into one schema-level error message.
        raise ValidationError("Invalid ObjectId format.")
|
||||
def _is_alphabetic_or_empty(value):
    """Marshmallow validator for keywords: accept None, "" or a purely alphabetic string."""
    if value is None or value == "":
        return True  # empty keywords are allowed
    if value.isalpha():
        return True
    raise ValidationError("Keyword must be alphabetic if not empty.")
|
||||
|
||||
# --- Base Schema for common fields ---
|
||||
class BaseSchema(ma.Schema):
    """Base schema with read-only fields common to all Mongo-backed documents."""
    # Dump the Mongo ObjectId as a string; never accepted on input.
    id = fields.Function(lambda obj: str(obj.get("_id")), dump_only=True)
    # Timestamps serialized as ISO 8601 strings; never accepted on input.
    createdAt = fields.DateTime(format='iso', dump_only=True)
    updatedAt = fields.DateTime(format='iso', dump_only=True)
||||
# --- User Schemas (for auth blueprint) ---
|
||||
class UserRegistrationSchema(ma.Schema):
    """Schema for validating user registration input (username, email, password)."""
    username = fields.String(
        required=True,
        validate=validate.Length(min=3, max=64, error="Username must be between 3 and 64 characters."),
    )
    # BUG FIX: fields.Email() has no `error` kwarg — marshmallow 3 stores
    # unknown keyword arguments as field metadata, so the custom message was
    # never used. Custom messages for field-level checks go in
    # `error_messages`; "invalid" is the key the Email validator raises with.
    email = fields.Email(required=True, error_messages={"invalid": "Invalid email format."})
    # load_only: accepted on input, never serialized back out.
    password = fields.String(
        required=True,
        validate=validate.Length(min=8, error="Password must be at least 8 characters."),
        load_only=True,
    )
||||
class UserLoginSchema(ma.Schema):
    """Schema for validating user login input."""
    username = fields.String(required=True)
    password = fields.String(required=True, load_only=True)  # Input only, never dumped
|
||||
class UserSchema(BaseSchema):
    """Schema for serializing user data for output (never exposes the password).

    Inherits id, createdAt and updatedAt from BaseSchema. The password is
    simply never declared as a field, and marshmallow's dump() only emits
    declared fields, so it cannot leak from the raw document.
    """
    username = fields.String(dump_only=True)
    email = fields.Email(dump_only=True)

    # BUG FIX: the previous `class Meta: exclude = ("password",)` referenced a
    # field that is never declared on this schema; marshmallow 3 raises
    # ValueError for unknown names in `exclude` when the schema is
    # instantiated, which made UserSchema() crash. The exclude was also
    # unnecessary (see docstring), so it is removed.
||||
class UserUpdateSchema(ma.Schema):
    """Schema for validating user account update input; every field is optional."""
    username = fields.String(validate=validate.Length(min=3, max=64))  # Optional update
    email = fields.Email()  # Optional update
    password = fields.String(validate=validate.Length(min=8), load_only=True)  # Optional update, input only
||||
# --- API Key Schemas (for api_keys blueprint) ---
# Closed set of LLM providers a stored API key may reference; used by the
# OneOf validators in the APIKey schemas below.
ALLOWED_API_PROVIDERS = ["Gemini", "Deepseek", "Chatgpt"]
||||
class APIKeyCreateSchema(ma.Schema):
    """Schema for validating new API key creation input."""
    # Provider name must be one of the closed ALLOWED_API_PROVIDERS set.
    name = fields.String(required=True, validate=OneOf(ALLOWED_API_PROVIDERS, error=f"Provider name must be one of: {ALLOWED_API_PROVIDERS}"))
    key = fields.String(required=True, validate=validate.Length(min=5, error="API Key seems too short."))  # Basic length check only
    selected = fields.Boolean(load_default=False)  # Default to False if not provided on load
||||
class APIKeyUpdateSchema(ma.Schema):
    """Schema for validating API key update input."""
    # All fields are optional for update
    name = fields.String(validate=OneOf(ALLOWED_API_PROVIDERS, error=f"Provider name must be one of: {ALLOWED_API_PROVIDERS}"))
    key = fields.String(validate=validate.Length(min=5))
    selected = fields.Boolean()
|
||||
class APIKeySchema(BaseSchema):
    """Schema for serializing API key data for output.

    Inherits id, createdAt and updatedAt from BaseSchema.
    """
    uid = fields.Function(lambda obj: str(obj.get("uid")), dump_only=True)  # Owning user's ObjectId as string
    name = fields.String(dump_only=True)
    # NOTE(review): the raw key is returned in full. Consider masking part of
    # it for security, e.g.:
    # fields.Function(lambda obj: f"{obj.get('key', '')[:4]}...{obj.get('key', '')[-4:]}" if obj.get('key') else None, dump_only=True)
    key = fields.String(dump_only=True)
    selected = fields.Boolean(dump_only=True)
||||
# --- Project Schemas (for projects blueprint) ---
|
||||
class KeywordSchema(ma.Schema):
    """Schema for individual keywords within a project or URL."""

    # Empty string or alphabetic word (enforced by _is_alphabetic_or_empty).
    word = fields.String(required=True, validate=_is_alphabetic_or_empty)
    # Relative weight of the keyword, as a percentage in [0, 100].
    percentage = fields.Float(required=True, validate=validate.Range(min=0, max=100))
|
||||
|
||||
class ProjectCreateSchema(ma.Schema):
    """Schema for validating new project creation input."""

    # Required project name.
    name = fields.String(required=True, validate=validate.Length(min=1, max=100, error="Project name must be between 1 and 100 characters."))
    topic = fields.String(validate=validate.Length(max=200))  # Optional topic
    description = fields.String(validate=validate.Length(max=1000))  # Optional description
|
||||
|
||||
class ProjectUpdateSchema(ma.Schema):
    """Schema for validating project update input."""

    # All fields are optional; only these allowed fields are accepted.
    name = fields.String(validate=validate.Length(min=1, max=100))
    topic = fields.String(validate=validate.Length(max=200))
    description = fields.String(validate=validate.Length(max=1000))
    # List of collaborator user IDs, each validated as an ObjectId string.
    collaborators = fields.List(fields.String(validate=_validate_object_id))
    # List of keyword objects (word + percentage).
    keywords = fields.List(fields.Nested(KeywordSchema))
|
||||
|
||||
class ProjectSchema(BaseSchema):
    """Schema for serializing detailed project data for output."""

    # Inherits id, createdAt, updatedAt from BaseSchema.
    ownerId = fields.Function(lambda obj: str(obj.get("ownerId")), dump_only=True)
    # Collaborator ObjectIds rendered as strings.
    collaborators = fields.List(fields.Function(lambda oid: str(oid)), dump_only=True)
    # NOTE(review): dumping the passkey exposes it in every GET response —
    # confirm it is genuinely needed in serialized output.
    passkey = fields.String(dump_only=True)
    name = fields.String(dump_only=True)
    topic = fields.String(dump_only=True)
    description = fields.String(dump_only=True)
    summary = fields.String(dump_only=True)
    keywords = fields.List(fields.Nested(KeywordSchema), dump_only=True)
    # Last user to act on the project; None when absent or not an ObjectId.
    lastActivityBy = fields.Function(lambda obj: str(obj.get("lastActivityBy")) if isinstance(obj.get("lastActivityBy"), ObjectId) else None, dump_only=True)
|
||||
|
||||
class ProjectListSchema(ma.Schema):
    """Summary view of a project: string id, name, and last-update time."""

    # Mongo's ObjectId is serialized to its string form.
    id = fields.Function(serialize=lambda doc: str(doc.get("_id")), dump_only=True)
    name = fields.String(dump_only=True)
    updatedAt = fields.DateTime(format='iso', dump_only=True)
|
||||
|
||||
# --- URL Schemas (for urls blueprint) ---
|
||||
class URLCreateSchema(ma.Schema):
    """Schema for validating new URL creation input."""

    # Required URL, restricted to http/https schemes.
    # NOTE(review): 'error' is not a fields.URL parameter; marshmallow treats
    # unknown field kwargs as metadata, so this custom message is likely never
    # used — confirm and move it into error_messages if intended.
    url = fields.URL(required=True, schemes={'http', 'https'}, error="Invalid URL format.")
|
||||
|
||||
class URLUpdateSchema(ma.Schema):
    """Schema for validating URL update input (only specific fields)."""

    # All fields optional; absent keys are left untouched by the update route.
    title = fields.String(validate=validate.Length(max=500))
    starred = fields.Boolean()
    note = fields.String()
    # Nested keyword structure is validated per KeywordSchema.
    keywords = fields.List(fields.Nested(KeywordSchema))
|
||||
|
||||
class URLSchema(BaseSchema):
    """Schema for serializing detailed URL data for output."""

    # Inherits id, createdAt, updatedAt from BaseSchema.
    projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True)
    url = fields.URL(dump_only=True)
    title = fields.String(dump_only=True)
    favicon = fields.String(dump_only=True, allow_none=True)
    starred = fields.Boolean(dump_only=True)
    note = fields.String(dump_only=True)
    keywords = fields.List(fields.Nested(KeywordSchema), dump_only=True)
    summary = fields.String(dump_only=True)
    # Background-pipeline state; the validate is documentation of expected
    # values (validators do not run on dump-only serialization).
    processingStatus = fields.String(dump_only=True, validate=OneOf(["pending", "processing", "completed", "failed"]))
|
||||
|
||||
class URLListSchema(ma.Schema):
    """Minimal URL representation used for project URL listings."""

    # Mongo's ObjectId is serialized to its string form.
    id = fields.Function(serialize=lambda doc: str(doc.get("_id")), dump_only=True)
    title = fields.String(dump_only=True)
    url = fields.URL(dump_only=True)
|
||||
|
||||
class URLSearchResultSchema(URLListSchema):
    """Schema for search results; currently identical to URLListSchema."""
|
||||
|
||||
# --- Activity Schemas (for activity blueprint) ---
|
||||
class ActivityCreateSchema(ma.Schema):
    """Schema for validating new activity log creation."""

    # Parent project; must be a valid ObjectId string.
    projectId = fields.String(required=True, validate=_validate_object_id)
    activityType = fields.String(required=True, validate=validate.Length(min=1))
    message = fields.String(load_default="")  # Optional human-readable detail
|
||||
|
||||
class ActivitySchema(BaseSchema):
    """Schema for serializing activity log data."""

    # Inherits id, createdAt from BaseSchema.
    # Note: updatedAt is not typically used for immutable logs.
    projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True)
    userId = fields.Function(lambda obj: str(obj.get("userId")), dump_only=True)
    activityType = fields.String(dump_only=True)
    message = fields.String(dump_only=True)
|
||||
|
||||
# --- Dialog Schemas (for dialog blueprint) ---
|
||||
class MessageSchema(ma.Schema):
    """Schema for individual messages within a dialog."""

    # Message author: the end user or the system/assistant side.
    role = fields.String(required=True, validate=OneOf(["user", "system"], error="Role must be 'user' or 'system'."))
    content = fields.String(required=True)
    # Server-assigned; only serialized, never accepted from input.
    timestamp = fields.DateTime(format='iso', dump_only=True)
|
||||
|
||||
class DialogCreateSchema(ma.Schema):
    """Schema for validating new dialog session creation."""

    # Parent project; must be a valid ObjectId string.
    projectId = fields.String(required=True, validate=_validate_object_id)
    sessionId = fields.String()  # Optional custom session ID
    startMessage = fields.String()  # Optional initial message
|
||||
|
||||
class DialogSendMessageSchema(ma.Schema):
    """Schema for validating user message input when sending to dialog."""

    # Non-empty message body.
    content = fields.String(required=True, validate=validate.Length(min=1, error="Message content cannot be empty."))
|
||||
|
||||
class DialogSchema(BaseSchema):
    """Schema for serializing detailed dialog session data (including messages)."""

    # Inherits id from BaseSchema.
    uid = fields.Function(lambda obj: str(obj.get("uid")), dump_only=True)
    projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True)
    provider = fields.String(dump_only=True)
    sessionId = fields.String(dump_only=True)  # Custom session ID if present
    sessionStartedAt = fields.DateTime(format='iso', dump_only=True)
    # Null while the session is still open.
    sessionEndedAt = fields.DateTime(format='iso', dump_only=True, allow_none=True)
    # Full nested message history.
    messages = fields.List(fields.Nested(MessageSchema), dump_only=True)
|
||||
|
||||
class DialogSummarySchema(BaseSchema):
    """Schema for serializing dialog session list entries (no messages).

    Unlike DialogSchema, this schema simply never declares a 'messages'
    field, so the potentially large message array is omitted from list
    views without any Meta-level exclusion.
    """

    # Inherits id from BaseSchema.
    uid = fields.Function(lambda obj: str(obj.get("uid")), dump_only=True)
    projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True)
    provider = fields.String(dump_only=True)
    sessionId = fields.String(dump_only=True)
    sessionStartedAt = fields.DateTime(format='iso', dump_only=True)
    sessionEndedAt = fields.DateTime(format='iso', dump_only=True, allow_none=True)

    # FIX: the previous `class Meta: exclude = ("messages",)` referenced a
    # field this schema never declares (it inherits BaseSchema, not
    # DialogSchema). marshmallow raises ValueError("Invalid fields to
    # exclude ...") for unknown names at schema instantiation, which would
    # break every endpoint using this schema. Since 'messages' is not a
    # declared field here, no exclusion is needed at all.
|
||||
|
||||
14
backend_flask/myapp/urls/__init__.py
Normal file
14
backend_flask/myapp/urls/__init__.py
Normal file
@ -0,0 +1,14 @@
|
||||
# myapp/urls/__init__.py

from flask import Blueprint

# Blueprint for the URL management module.
# 'urls' is the unique registration name; url_prefix='/api' is prepended to
# every route defined in this blueprint. Concrete paths such as
# '/projects/<id>/urls' or '/urls/<id>' are defined in urls_routes.py.
bp = Blueprint('urls', __name__, url_prefix='/api')

# Import the routes module so its @bp.route handlers attach to 'bp'.
# This import MUST come AFTER 'bp' is defined (routes.py imports bp back
# from this package).
from . import urls_routes
|
||||
817
backend_flask/myapp/urls/urls_routes.py
Normal file
817
backend_flask/myapp/urls/urls_routes.py
Normal file
@ -0,0 +1,817 @@
|
||||
# myapp/urls/urls_routes.py

import datetime
import logging
from flask import request, jsonify, current_app, has_app_context  # Flask utilities
from bson.objectid import ObjectId, InvalidId  # For MongoDB ObjectIds
from collections import defaultdict  # Potentially useful for keyword aggregation etc.
from functools import wraps  # For creating dummy decorators
import re  # For escaping regex characters in search

# --- Local Blueprint Import ---
from . import bp  # Import the 'bp' instance defined in the local __init__.py

# --- Shared Extensions and Utilities Imports ---
# Degrade gracefully when extensions are unavailable: the module still
# imports, and each route handler checks these names and returns 500s.
try:
    from ..extensions import mongo  # Import the initialized PyMongo instance
    from ..utils import token_required  # Import the authentication decorator
except ImportError:
    print("Warning: Could not import mongo or token_required in urls/urls_routes.py.")
    mongo = None

    # Dummy decorator standing in for token_required so routes still register;
    # every call through it reports a server configuration error.
    def token_required(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            print("ERROR: token_required decorator is not available!")
            return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500
        return wrapper


# --- Schema Imports ---
# Same graceful-degradation pattern: routes check for None before use.
try:
    # Import the relevant schemas defined in schemas.py
    from ..schemas import (
        URLCreateSchema, URLUpdateSchema, URLSchema,
        URLListSchema, URLSearchResultSchema
    )
    from marshmallow import ValidationError
except ImportError:
    print("Warning: Could not import URL schemas or ValidationError in urls/urls_routes.py.")
    URLCreateSchema = None
    URLUpdateSchema = None
    URLSchema = None
    URLListSchema = None
    URLSearchResultSchema = None
    ValidationError = None

# --- Celery Task Imports ---
# IMPORTANT: Assumes the project root directory ('your_fullstack_project/') is in PYTHONPATH
try:
    from backend_flask.celery_worker.celery_app import async_extract_title_and_keywords, async_summarize_url, async_recalc_project_keywords
except ModuleNotFoundError:
    print("Warning: Could not import Celery tasks from 'celery_worker'. Ensure project root is in PYTHONPATH.")

    # Factory for inert task stand-ins so later `.delay()` calls log an error
    # instead of raising NameError when Celery is not configured.
    def _dummy_celery_task(*args, **kwargs):
        task_name = args[0] if args else 'dummy_task'
        print(f"ERROR: Celery task {task_name} not available!")

        class DummyTask:
            def __init__(self, name):
                self.__name__ = name

            def delay(self, *a, **kw):
                print(f"ERROR: Tried to call delay() on dummy task {self.__name__}")
                pass

        return DummyTask(task_name)

    async_extract_title_and_keywords = _dummy_celery_task('async_extract_title_and_keywords')
    async_summarize_url = _dummy_celery_task('async_summarize_url')
    async_recalc_project_keywords = _dummy_celery_task('async_recalc_project_keywords')
|
||||
|
||||
|
||||
# --- Helper to get logger safely ---
|
||||
def _get_logger():
    """Return the Flask app logger when inside an app context, else a module logger."""
    return current_app.logger if has_app_context() else logging.getLogger(__name__)
|
||||
|
||||
# Note: Routes use paths relative to the '/api' prefix defined in __init__.py.
|
||||
|
||||
# --------------------------
|
||||
# Create URL Endpoint
|
||||
# Path: POST /api/projects/<project_id>/urls
|
||||
# --------------------------
|
||||
@bp.route('/projects/<string:project_id>/urls', methods=['POST'])
@token_required
def create_url(current_user, project_id):
    """
    Create a new URL entry within a specific project.

    Uses URLCreateSchema to validate the required 'url' field; optional
    fields (title, favicon, starred, note, keywords, summary) are taken
    from the raw JSON payload. Verifies the authenticated user owns or
    collaborates on the project, inserts the URL document, and queues
    background Celery tasks for title/keyword extraction and summarization.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        logger.error("Invalid current_user object received in create_url")
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
        user_id_str = str(user_id)  # String form is what Celery tasks receive
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in create_url: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not URLCreateSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    logger.debug(f"create_url called: project_id={project_id}, data={json_data}, user_id={user_id_str}")
    schema = URLCreateSchema()
    try:
        # Validate only the required 'url' field initially
        validated_input = schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Create URL validation failed: {err.messages}")
        return jsonify(err.messages), 422

    user_url = validated_input['url']  # URL is guaranteed by schema

    try:
        # Validate project ID format from URL path
        try:
            project_obj_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format in URL path."}), 400

        # --- Verify Project Access ---
        db = mongo.db
        project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1})
        if not project:
            return jsonify({"message": "Project not found."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_obj_id} is missing ownerId field.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have access to add URLs to this project."}), 403

        # --- Prepare URL Document ---
        # BUG FIX: optional fields were previously read from an undefined name
        # 'data', raising NameError on every valid request; they come from the
        # raw payload 'json_data' (only 'url' passes through the schema).
        keywords_data = json_data.get("keywords", [])
        keywords_converted = []
        if isinstance(keywords_data, list):
            for kw in keywords_data:
                if isinstance(kw, dict):
                    word = kw.get("word", "").strip()
                    if word:
                        try:
                            percentage = float(kw.get("percentage", 0.0))
                            keywords_converted.append({"word": word, "percentage": percentage})
                        except (ValueError, TypeError):
                            logger.warning(f"Could not convert keyword percentage for word '{word}' during URL creation.")
                else:
                    logger.warning("Non-dict item found in keywords during URL creation.")

        now = datetime.datetime.now(datetime.timezone.utc)
        url_doc = {
            "projectId": project_obj_id,
            "url": user_url,  # Use validated URL
            "title": json_data.get("title", "").strip(),
            "favicon": json_data.get("favicon", ""),
            "starred": bool(json_data.get("starred", False)),
            "note": json_data.get("note", "").strip(),
            "keywords": keywords_converted,
            "summary": json_data.get("summary", "").strip(),
            "processingStatus": "pending",
            "createdAt": now,
            "updatedAt": now
        }

        # Insert the new URL document
        result = db.urls.insert_one(url_doc)
        new_url_id_str = str(result.inserted_id)
        logger.info(f"Successfully inserted URL {new_url_id_str} for project {project_id}")

        # --- Trigger Background Tasks ---
        # Failure to queue is non-fatal: the URL row is already stored.
        tasks_queued = True
        try:
            async_extract_title_and_keywords.delay(new_url_id_str, user_id_str)
            # Summarize with Gemini only when the user has a selected Gemini key.
            api_doc = db.api_list.find_one({"uid": user_id, "selected": True, "name": "Gemini"})
            use_gemini = bool(api_doc and api_doc.get("key"))
            async_summarize_url.delay(new_url_id_str, user_id_str, use_gemini)
            logger.info(f"Queued Celery tasks for URL {new_url_id_str} (use_gemini={use_gemini})")
        except NameError as ne:
            logger.error(f"Celery tasks not available for URL {new_url_id_str}: {ne}. Processing cannot be initiated.")
            tasks_queued = False
        except Exception as celery_err:
            logger.error(f"Failed to queue Celery tasks for URL {new_url_id_str}: {celery_err}", exc_info=True)
            tasks_queued = False

        response_message = "URL created successfully and processing initiated." if tasks_queued else "URL created, but failed to initiate background processing."
        return jsonify({"message": response_message, "url_id": new_url_id_str}), 201

    except Exception as e:
        logger.error(f"Error creating URL for project {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An internal error occurred while creating the URL."}), 500
|
||||
|
||||
|
||||
# --------------------------
|
||||
# List URLs for Project (Simplified)
|
||||
# Path: GET /api/projects/<project_id>/urls
|
||||
# --------------------------
|
||||
@bp.route('/projects/<string:project_id>/urls', methods=['GET'])
@token_required
def list_urls_for_project(current_user, project_id):
    """
    Retrieve a simplified list (id, title, url) of all URLs within a specific project.

    Uses URLListSchema for output serialization and verifies the
    authenticated user owns or collaborates on the project.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in list_urls_for_project: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies (may be None if imports failed at module load)
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not URLListSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        # Validate project ID format from URL path
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format in URL path."}), 400

        db = mongo.db
        # --- Verify Project Access ---
        project = db.projects.find_one(
            {"_id": obj_project_id},
            {"ownerId": 1, "collaborators": 1}  # Projection for access check
        )
        if not project:
            return jsonify({"message": "Project not found."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have access to this project's URLs."}), 403

        # --- Fetch and Serialize URLs ---
        # Project only the fields URLListSchema needs; sorting on updatedAt
        # still works server-side even though it is excluded from the result.
        cursor = db.urls.find(
            {"projectId": obj_project_id},
            {"_id": 1, "title": 1, "url": 1}  # Projection matching URLListSchema
        ).sort("updatedAt", -1)  # Sort by most recently updated

        url_docs = list(cursor)  # Convert cursor to list

        # Serialize using the schema
        output_schema = URLListSchema(many=True)
        serialized_result = output_schema.dump(url_docs)

        # Return the serialized list of URLs
        return jsonify({"urls": serialized_result}), 200

    except Exception as e:
        logger.error(f"Error listing URLs for project {project_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while listing URLs."}), 500
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Get URL Detail
|
||||
# Path: GET /api/urls/<url_id>
|
||||
# --------------------------
|
||||
@bp.route('/urls/<string:url_id>', methods=['GET'])
@token_required
def get_url_detail(current_user, url_id):
    """
    Retrieve the full details for a specific URL entry by its ID.

    Uses URLSchema for output serialization and verifies user access via
    the associated project (owner or collaborator).
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in get_url_detail: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not URLSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    try:
        # Validate URL ID format from URL path
        try:
            obj_url_id = ObjectId(url_id)
        except InvalidId:
            return jsonify({"message": "Invalid URL ID format."}), 400

        db = mongo.db
        # Find the URL document
        url_doc = db.urls.find_one({"_id": obj_url_id})
        if not url_doc:
            return jsonify({"message": "URL not found."}), 404

        # --- Verify Project Access ---
        # Access is derived from the parent project, so the link must be valid.
        project_obj_id = url_doc.get("projectId")
        if not project_obj_id or not isinstance(project_obj_id, ObjectId):
            logger.error(f"URL {url_id} has missing or invalid projectId.")
            return jsonify({"message": "URL data integrity issue (missing project link)."}), 500

        project = db.projects.find_one(
            {"_id": project_obj_id},
            {"ownerId": 1, "collaborators": 1}  # Projection for access check
        )
        if not project:
            logger.error(f"Project {project_obj_id} associated with URL {url_id} not found.")
            return jsonify({"message": "Associated project not found; cannot verify access."}), 404  # Or 500

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_obj_id} is missing ownerId in get_url_detail.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have access to the project containing this URL."}), 403

        # --- Serialize and Return URL Details ---
        output_schema = URLSchema()
        # Schema handles ObjectId, datetime conversion, and field selection
        serialized_result = output_schema.dump(url_doc)

        return jsonify(serialized_result), 200

    except Exception as e:
        logger.error(f"Error retrieving URL detail for {url_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while retrieving the URL details."}), 500
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Update URL
|
||||
# Path: PUT /api/urls/<url_id>
|
||||
# --------------------------
|
||||
@bp.route('/urls/<string:url_id>', methods=['PUT'])
@token_required
def update_url(current_user, url_id):
    """
    Update specific fields of a URL entry (title, starred, note, keywords).

    Uses URLUpdateSchema for input validation and verifies user access via
    the associated project. Triggers project keyword recalculation (Celery)
    if keywords are changed. Returns simplified updated URL info using
    URLListSchema.
    """
    logger = _get_logger()
    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in update_url: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo: return jsonify({"message": "Database connection not available."}), 500
    if not URLUpdateSchema or not URLListSchema or not ValidationError:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Get and validate JSON data using the schema
    json_data = request.get_json() or {}
    update_schema = URLUpdateSchema()
    try:
        # Load validates only the allowed fields (title, starred, note, keywords)
        validated_data = update_schema.load(json_data)
    except ValidationError as err:
        logger.warning(f"Update URL validation failed: {err.messages}")
        return jsonify(err.messages), 422

    # If validation passed but no valid fields were provided
    if not validated_data:
        return jsonify({"message": "No valid fields provided for update."}), 400

    try:
        # Validate URL ID format
        try:
            obj_url_id = ObjectId(url_id)
        except InvalidId:
            return jsonify({"message": "Invalid URL ID format."}), 400

        db = mongo.db
        # --- Find URL and Verify Access ---
        # Fetch projectId needed for access check
        url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1})
        if not url_doc:
            return jsonify({"message": "URL not found."}), 404

        project_obj_id = url_doc.get("projectId")
        if not project_obj_id or not isinstance(project_obj_id, ObjectId):
            logger.error(f"URL {url_id} has missing or invalid projectId during update.")
            return jsonify({"message": "URL data integrity issue (missing project link)."}), 500

        project = db.projects.find_one(
            {"_id": project_obj_id},
            {"ownerId": 1, "collaborators": 1}  # Projection for access check
        )
        if not project:
            logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during update.")
            return jsonify({"message": "Associated project not found; cannot verify access."}), 404  # Or 500

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_obj_id} is missing ownerId during URL update.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have access to update URLs in this project."}), 403

        # --- Prepare Update Fields from validated data ---
        update_fields = validated_data  # Use the validated dictionary directly
        keywords_changed = "keywords" in update_fields  # Check if keywords were part of the update

        # Always update the 'updatedAt' timestamp
        update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc)

        # --- Perform Update ---
        result = db.urls.update_one({"_id": obj_url_id}, {"$set": update_fields})

        # --- Return Response ---
        if result.matched_count == 1:
            # Retrieve the updated URL doc to return simplified info
            updated_url_doc = db.urls.find_one(
                {"_id": obj_url_id},
                {"_id": 1, "title": 1, "url": 1}  # Projection for list schema
            )
            if updated_url_doc:
                # Serialize using the list schema for consistency
                output_schema = URLListSchema()
                serialized_url = output_schema.dump(updated_url_doc)

                # Trigger keyword recalc for the project in background if keywords changed.
                # Queue failures are logged but do not fail the request.
                if keywords_changed:
                    try:
                        async_recalc_project_keywords.delay(str(project_obj_id), str(user_id))
                        logger.info(f"Queued keyword recalc task for project {project_obj_id} after URL {url_id} update.")
                    except NameError:
                        logger.error("Celery task 'async_recalc_project_keywords' not available during URL update.")
                    except Exception as celery_err:
                        logger.error(f"Failed to queue Celery recalc task for project {project_obj_id} after URL update: {celery_err}", exc_info=True)

                return jsonify({"message": "URL updated successfully.", "url": serialized_url}), 200
            else:
                logger.warning(f"URL {url_id} updated but could not be retrieved.")
                return jsonify({"message": "URL updated successfully, but failed to retrieve updated data."}), 200
        else:
            # Matched count was 0
            return jsonify({"message": "URL update failed (document not found)."}), 404

    except Exception as e:
        logger.error(f"Error updating URL {url_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while updating the URL."}), 500
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Delete URL
|
||||
# Path: DELETE /api/urls/<url_id>
|
||||
# --------------------------
|
||||
@bp.route('/urls/<string:url_id>', methods=['DELETE'])
@token_required
def delete_url(current_user, url_id):
    """
    Delete a specific URL entry by its ID.

    Access is verified through the project that owns the URL (owner or
    collaborator). A successful deletion queues a background keyword
    recalculation task for that project. No request body or schema is used.
    """
    logger = _get_logger()

    # The auth decorator should always hand us a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in delete_url: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Bail out early when the DB handle is unavailable.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500

    try:
        try:
            obj_url_id = ObjectId(url_id)
        except InvalidId:
            return jsonify({"message": "Invalid URL ID format."}), 400

        db = mongo.db

        # Fetch only projectId: it drives both the access check and the
        # post-delete recalculation trigger.
        url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1})
        if not url_doc:
            return jsonify({"message": "URL not found."}), 404

        project_obj_id = url_doc.get("projectId")
        if not project_obj_id or not isinstance(project_obj_id, ObjectId):
            logger.error(f"URL {url_id} has missing or invalid projectId during delete.")
            return jsonify({"message": "URL data integrity issue (missing project link)."}), 500

        project = db.projects.find_one(
            {"_id": project_obj_id},
            {"ownerId": 1, "collaborators": 1},  # only what the access check needs
        )
        # Without the project we cannot establish access rights, so refuse.
        if not project:
            logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during delete.")
            return jsonify({"message": "Cannot verify access; associated project missing."}), 403

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_obj_id} is missing ownerId during URL delete.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have permission to delete URLs in this project."}), 403

        # --- Perform Deletion ---
        delete_result = db.urls.delete_one({"_id": obj_url_id})

        if delete_result.deleted_count == 1:
            # Best effort: queue the keyword recalculation, but never fail the
            # (already committed) deletion because of the task queue.
            try:
                async_recalc_project_keywords.delay(str(project_obj_id), str(user_id))
                logger.info(f"Queued keyword recalc task for project {project_obj_id} after URL {url_id} deletion.")
            except NameError:
                logger.error("Celery task 'async_recalc_project_keywords' not available during URL deletion.")
            except Exception as celery_err:
                logger.error(f"Failed to queue Celery recalc task for project {project_obj_id} after URL deletion: {celery_err}", exc_info=True)

            return jsonify({"message": "URL deleted successfully."}), 200

        # find_one saw the document but delete_one removed nothing.
        logger.error(f"URL {obj_url_id} found but delete_one failed (deleted_count=0).")
        return jsonify({"message": "Failed to delete URL (already deleted?)."}), 404

    except Exception as e:
        logger.error(f"Error deleting URL {url_id}: {e}", exc_info=True)
        return jsonify({"message": "An error occurred while deleting the URL."}), 500
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Celery Task Trigger Endpoints
|
||||
# Path: PUT /api/urls/<url_id>/extract_title_and_keywords
|
||||
# Path: PUT /api/urls/<url_id>/summarize
|
||||
# --------------------------
|
||||
@bp.route('/urls/<string:url_id>/extract_title_and_keywords', methods=['PUT'])
@token_required
def trigger_extract_title_and_keywords(current_user, url_id):
    """
    Manually queue the background task that extracts a URL's title and keywords.

    Access is verified via the URL's owning project (owner or collaborator).
    On success, processingStatus is set to 'pending' and the Celery task is
    queued, returning 202 Accepted. No request body or schema is used.
    """
    logger = _get_logger()

    # The auth decorator should always hand us a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
        user_id_str = str(user_id)  # Celery task arguments must be serializable
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in trigger_extract: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Bail out early when the DB handle is unavailable.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500

    try:
        try:
            obj_url_id = ObjectId(url_id)
        except InvalidId:
            return jsonify({"message": "Invalid URL ID format."}), 400

        db = mongo.db

        # Only projectId is needed here — it anchors the access check.
        url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1})
        if not url_doc:
            return jsonify({"message": "URL not found."}), 404

        project_obj_id = url_doc.get("projectId")
        if not project_obj_id or not isinstance(project_obj_id, ObjectId):
            logger.error(f"URL {url_id} has missing or invalid projectId during trigger_extract.")
            return jsonify({"message": "URL data integrity issue (missing project link)."}), 500

        project = db.projects.find_one(
            {"_id": project_obj_id},
            {"ownerId": 1, "collaborators": 1},  # only what the access check needs
        )
        if not project:
            logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during trigger_extract.")
            return jsonify({"message": "Associated project not found; cannot verify access."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_obj_id} is missing ownerId during trigger_extract.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have access to trigger processing for this URL."}), 403

        # Mark the URL 'pending' before queueing so clients can poll status.
        db.urls.update_one(
            {"_id": obj_url_id},
            {"$set": {"processingStatus": "pending", "updatedAt": datetime.datetime.now(datetime.timezone.utc)}},
        )

        try:
            async_extract_title_and_keywords.delay(url_id, user_id_str)
            logger.info(f"Queued title/keyword extraction task for URL {url_id}")
            return jsonify({"message": "Title and keyword extraction task queued successfully."}), 202
        except NameError:
            # Task symbol missing; status intentionally stays 'pending'.
            logger.error("Celery task 'async_extract_title_and_keywords' is not defined or imported correctly.")
            return jsonify({"message": "Server configuration error: Extraction feature unavailable."}), 500
        except Exception as e:
            # Queueing failed; status intentionally stays 'pending'.
            logger.error(f"Error queueing extraction task for URL {url_id}: {e}", exc_info=True)
            return jsonify({"message": "An error occurred while queueing the extraction task."}), 500

    except Exception as e:
        logger.error(f"Error in trigger_extract_title_and_keywords endpoint for URL {url_id}: {e}", exc_info=True)
        return jsonify({"message": "An internal error occurred before queueing the task."}), 500
|
||||
|
||||
|
||||
@bp.route('/urls/<string:url_id>/summarize', methods=['PUT'])
@token_required
def trigger_summarize_url(current_user, url_id):
    """
    Manually queue the background task that summarizes a URL.

    Access is verified via the URL's owning project (owner or collaborator).
    Whether the Gemini backend is used depends on the user's selected API key.
    On success, processingStatus is set to 'pending' and the Celery task is
    queued, returning 202 Accepted. No request body or schema is used.
    """
    logger = _get_logger()

    # The auth decorator should always hand us a user document with an _id.
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
        user_id_str = str(user_id)  # Celery task arguments must be serializable
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in trigger_summarize: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Bail out early when the DB handle is unavailable.
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500

    try:
        try:
            obj_url_id = ObjectId(url_id)
        except InvalidId:
            return jsonify({"message": "Invalid URL ID format."}), 400

        db = mongo.db

        # Only projectId is needed here — it anchors the access check.
        url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1})
        if not url_doc:
            return jsonify({"message": "URL not found."}), 404

        project_obj_id = url_doc.get("projectId")
        if not project_obj_id or not isinstance(project_obj_id, ObjectId):
            logger.error(f"URL {url_id} has missing or invalid projectId during trigger_summarize.")
            return jsonify({"message": "URL data integrity issue (missing project link)."}), 500

        project = db.projects.find_one(
            {"_id": project_obj_id},
            {"ownerId": 1, "collaborators": 1},  # only what the access check needs
        )
        if not project:
            logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during trigger_summarize.")
            return jsonify({"message": "Associated project not found; cannot verify access."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        if not owner_id:
            logger.error(f"Project {project_obj_id} is missing ownerId during trigger_summarize.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "You do not have access to trigger processing for this URL."}), 403

        # Mark the URL 'pending' before queueing so clients can poll status.
        db.urls.update_one(
            {"_id": obj_url_id},
            {"$set": {"processingStatus": "pending", "updatedAt": datetime.datetime.now(datetime.timezone.utc)}},
        )

        # Gemini is used only when the user has selected a Gemini key
        # and that key is actually present.
        api_doc = db.api_list.find_one({"uid": user_id, "selected": True, "name": "Gemini"})
        use_gemini = bool(api_doc and api_doc.get("key"))

        try:
            async_summarize_url.delay(url_id, user_id_str, use_gemini)
            logger.info(f"Queued summarization task for URL {url_id} (use_gemini={use_gemini})")
            return jsonify({"message": "Summarization task queued successfully."}), 202
        except NameError:
            # Task symbol missing; status intentionally stays 'pending'.
            logger.error("Celery task 'async_summarize_url' is not defined or imported correctly.")
            return jsonify({"message": "Server configuration error: Summarization feature unavailable."}), 500
        except Exception as e:
            # Queueing failed; status intentionally stays 'pending'.
            logger.error(f"Error queueing summarization task for URL {url_id}: {e}", exc_info=True)
            return jsonify({"message": "An error occurred while queueing the summarization task."}), 500

    except Exception as e:
        logger.error(f"Error in trigger_summarize_url endpoint for URL {url_id}: {e}", exc_info=True)
        return jsonify({"message": "An internal error occurred before queueing the task."}), 500
|
||||
|
||||
|
||||
# --------------------------
|
||||
# Search URLs within Project
|
||||
# Path: GET /api/projects/<project_id>/search?q=...
|
||||
# --------------------------
|
||||
@bp.route('/projects/<string:project_id>/search', methods=['GET'])
@token_required
def search_urls(current_user, project_id):
    """
    Search for URLs within a specific project based on a query string.

    Query parameter:
        q: the search text. Regex metacharacters are escaped, and the
           case-insensitive pattern is matched against 'title', 'note',
           'keywords.word' and 'summary'.

    Returns a simplified list (id, title, url) of matching URLs serialized
    with URLSearchResultSchema. Project access (owner or collaborator) is
    verified before searching.
    """
    logger = _get_logger()

    # Validate user object from token
    if not current_user or not current_user.get("_id"):
        return jsonify({"message": "Internal authorization error."}), 500
    try:
        user_id = ObjectId(current_user["_id"])
    except (InvalidId, TypeError) as e:
        logger.error(f"User ID conversion error in search_urls: {e}")
        return jsonify({"message": "Invalid user ID format in token."}), 400

    # Check dependencies
    if not mongo:
        return jsonify({"message": "Database connection not available."}), 500
    if not URLSearchResultSchema:
        return jsonify({"message": "Server configuration error: Schema unavailable."}), 500

    # Defined before the try block so the error handler below can always
    # reference it (previously a NameError was possible if an exception
    # fired before the assignment inside the try).
    query_str = ""
    try:
        query_str = request.args.get("q", "").strip()
        # An empty query matches nothing; short-circuit with empty results.
        if not query_str:
            return jsonify({"results": []}), 200

        # Validate project ID format from URL path
        try:
            obj_project_id = ObjectId(project_id)
        except InvalidId:
            return jsonify({"message": "Invalid project ID format in URL path."}), 400

        db = mongo.db
        # --- Verify Project Access ---
        project = db.projects.find_one(
            {"_id": obj_project_id},
            {"ownerId": 1, "collaborators": 1}  # Projection for access check
        )
        if not project:
            return jsonify({"message": "Project not found."}), 404

        owner_id = project.get("ownerId")
        collaborators = project.get("collaborators", [])
        # Integrity check, consistent with the other URL endpoints: a project
        # without an owner cannot be used to authorize access.
        if not owner_id:
            logger.error(f"Project {obj_project_id} is missing ownerId during URL search.")
            return jsonify({"message": "Project data integrity issue."}), 500
        if owner_id != user_id and user_id not in collaborators:
            return jsonify({"message": "Access denied to search URLs in this project."}), 403

        # --- Perform Search using Aggregation Pipeline ---
        # Escape regex special characters so user input is matched literally.
        escaped_query = re.escape(query_str)
        search_pipeline = [
            {"$match": {"projectId": obj_project_id}},
            {"$match": {
                "$or": [
                    {"title": {"$regex": escaped_query, "$options": "i"}},
                    {"note": {"$regex": escaped_query, "$options": "i"}},
                    {"keywords.word": {"$regex": escaped_query, "$options": "i"}},
                    {"summary": {"$regex": escaped_query, "$options": "i"}}
                ]
            }},
            # Project only fields needed by the output schema
            {"$project": {"_id": 1, "title": 1, "url": 1, "updatedAt": 1}},
            {"$sort": {"updatedAt": -1}}  # newest first
            # Add $limit stage if needed
        ]

        # Execute the aggregation pipeline and materialize the cursor
        search_result_docs = list(db.urls.aggregate(search_pipeline))

        # --- Serialize results using the schema ---
        # Schema handles ObjectId conversion and field selection
        output_schema = URLSearchResultSchema(many=True)
        serialized_result = output_schema.dump(search_result_docs)

        return jsonify({"results": serialized_result}), 200

    except Exception as e:
        logger.error(f"Error searching URLs in project {project_id} with query '{query_str}': {e}", exc_info=True)
        return jsonify({"message": "An error occurred during URL search."}), 500
|
||||
|
||||
93
backend_flask/myapp/utils.py
Normal file
93
backend_flask/myapp/utils.py
Normal file
@ -0,0 +1,93 @@
|
||||
# utils/auth.py (or wherever token_required is defined)
|
||||
|
||||
from functools import wraps
|
||||
import secrets
|
||||
import jwt
|
||||
from flask import request, jsonify, current_app # <-- Import current_app
|
||||
# Config might still be needed for default algorithm if not in app.config
|
||||
# from backend.config import Config # Keep if needed for defaults, but prefer current_app.config
|
||||
|
||||
# TODO Flask cannot find config inside the utils
|
||||
from .config import Config # Example if config.py is in the same dir
|
||||
from bson.objectid import ObjectId
|
||||
# Remove direct import of mongo
|
||||
|
||||
|
||||
def token_required(f):
    """
    Decorator that validates the JWT in the Authorization header and passes
    the matching user document as the first positional argument of the
    wrapped view. Responds 401 on any missing/invalid/expired token or when
    the referenced user no longer exists.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        token = None
        auth_header = request.headers.get("Authorization")
        if auth_header:
            parts = auth_header.split()
            if len(parts) == 2 and parts[0].lower() == "bearer":
                # Standard "Bearer <token>" form
                token = parts[1]
            elif len(parts) == 1:
                # Also accept a raw token with no "Bearer" prefix
                token = auth_header

        if not token:
            return jsonify({"message": "Token is missing."}), 401

        try:
            # Signing configuration comes from the running app's config;
            # fall back to the package Config / HS256 for the algorithm.
            secret_key = current_app.config['SECRET_KEY']
            algorithm = current_app.config.get('JWT_ALGORITHM', Config.JWT_ALGORITHM or 'HS256')

            data = jwt.decode(token, secret_key, algorithms=[algorithm])

            user_id_str = data.get("user_id")
            if not user_id_str:
                return jsonify({"message": "Token payload missing user_id."}), 401

            # Look the user up through the mongo instance attached to the app
            # (avoids importing a module-level mongo and the cycles that brings).
            current_user_doc = current_app.mongo.db.users.find_one({"_id": ObjectId(user_id_str)})

            # A valid token may still reference a since-deleted user.
            if not current_user_doc:
                return jsonify({"message": "User associated with token not found."}), 401

        except jwt.ExpiredSignatureError:
            return jsonify({"message": "Token has expired."}), 401
        except jwt.InvalidTokenError as e:
            current_app.logger.warning(f"Invalid token encountered: {e}")
            return jsonify({"message": "Token is invalid."}), 401
        except Exception as e:
            # e.g. bad ObjectId, DB connectivity — keep the response generic.
            current_app.logger.error(f"Error during token verification: {e}", exc_info=True)
            return jsonify({"message": "Token verification failed."}), 401

        # Hand the full user document to the wrapped view.
        return f(current_user_doc, *args, **kwargs)

    return decorated
|
||||
|
||||
|
||||
# This is a placeholder for background task functions.
|
||||
# For example, you could use Celery to process URLs asynchronously.
|
||||
def process_url(url_id):
    """
    Placeholder for background URL processing (scrape, summarize, then
    update processingStatus). Intended to be invoked by a background
    worker such as Celery; currently a no-op.
    """
    pass
|
||||
|
||||
|
||||
# This function will generate a pass key for frontend-backend communication
|
||||
def generate_passkey():
    """Return a fresh 32-character hex pass key (16 random bytes) for
    frontend-backend communication."""
    return secrets.token_hex(16)
|
||||
|
||||
104
backend_flask/requirements.txt
Normal file
104
backend_flask/requirements.txt
Normal file
@ -0,0 +1,104 @@
|
||||
aiohappyeyeballs==2.6.1
|
||||
aiohttp==3.11.16
|
||||
aiosignal==1.3.2
|
||||
amqp==5.3.1
|
||||
annotated-types==0.7.0
|
||||
anyio==4.9.0
|
||||
attrs==25.3.0
|
||||
backend==0.2.4.1
|
||||
beautifulsoup4==4.13.3
|
||||
billiard==4.2.1
|
||||
blinker==1.9.0
|
||||
cachetools==5.5.2
|
||||
celery==5.5.1
|
||||
certifi==2025.1.31
|
||||
charset-normalizer==3.4.1
|
||||
click==8.1.8
|
||||
click-didyoumean==0.3.1
|
||||
click-plugins==1.1.1
|
||||
click-repl==0.3.0
|
||||
dataclasses-json==0.6.7
|
||||
dnspython==2.7.0
|
||||
faiss-cpu==1.10.0
|
||||
Flask==3.1.0
|
||||
flask-cors==5.0.1
|
||||
Flask-JWT-Extended==4.7.1
|
||||
flask-marshmallow==1.3.0
|
||||
Flask-PyMongo==2.3.0
|
||||
frozenlist==1.5.0
|
||||
google-ai-generativelanguage==0.6.15
|
||||
google-api-core==2.24.2
|
||||
google-api-python-client==2.166.0
|
||||
google-auth==2.38.0
|
||||
google-auth-httplib2==0.2.0
|
||||
google-generativeai==0.8.4
|
||||
googleapis-common-protos==1.69.2
|
||||
greenlet==3.1.1
|
||||
grpcio==1.71.0
|
||||
grpcio-status==1.71.0
|
||||
h11==0.14.0
|
||||
httpcore==1.0.8
|
||||
httplib2==0.22.0
|
||||
httpx==0.28.1
|
||||
httpx-sse==0.4.0
|
||||
idna==3.10
|
||||
importlib-metadata==4.13.0
|
||||
iniconfig==2.1.0
|
||||
itsdangerous==2.2.0
|
||||
Jinja2==3.1.6
|
||||
jsonpatch==1.33
|
||||
jsonpointer==3.0.0
|
||||
kombu==5.5.2
|
||||
langchain==0.3.23
|
||||
langchain-community==0.3.21
|
||||
langchain-core==0.3.51
|
||||
langchain-text-splitters==0.3.8
|
||||
langsmith==0.3.30
|
||||
MarkupSafe==3.0.2
|
||||
marshmallow==3.26.1
|
||||
multidict==6.4.3
|
||||
mypy-extensions==1.0.0
|
||||
numpy==1.26.4
|
||||
orjson==3.10.16
|
||||
packaging==24.2
|
||||
pluggy==1.5.0
|
||||
prompt_toolkit==3.0.50
|
||||
propcache==0.3.1
|
||||
proto-plus==1.26.1
|
||||
protobuf==5.29.4
|
||||
pyasn1==0.6.1
|
||||
pyasn1_modules==0.4.2
|
||||
pydantic==2.11.3
|
||||
pydantic-settings==2.8.1
|
||||
pydantic_core==2.33.1
|
||||
PyJWT==2.10.1
|
||||
pymongo==4.12.0
|
||||
pyparsing==3.2.3
|
||||
pytest==8.3.5
|
||||
python-dateutil==2.9.0.post0
|
||||
python-dotenv==1.1.0
|
||||
PyYAML==6.0.2
|
||||
redis==5.2.1
|
||||
repoze.lru==0.7
|
||||
requests==2.32.3
|
||||
requests-toolbelt==1.0.0
|
||||
Routes==2.5.1
|
||||
rsa==4.9
|
||||
six==1.17.0
|
||||
sniffio==1.3.1
|
||||
soupsieve==2.6
|
||||
SQLAlchemy==2.0.40
|
||||
tenacity==9.1.2
|
||||
tqdm==4.67.1
|
||||
typing-inspect==0.9.0
|
||||
typing-inspection==0.4.0
|
||||
typing_extensions==4.13.2
|
||||
tzdata==2025.2
|
||||
uritemplate==4.1.1
|
||||
urllib3==2.4.0
|
||||
vine==5.1.0
|
||||
wcwidth==0.2.13
|
||||
Werkzeug==3.1.3
|
||||
yarl==1.19.0
|
||||
zipp==3.21.0
|
||||
zstandard==0.23.0
|
||||
32
backend_flask/run.py
Normal file
32
backend_flask/run.py
Normal file
@ -0,0 +1,32 @@
|
||||
# backend/run.py

import os
from myapp import create_app  # application factory

# Pick the configuration profile from the environment, defaulting to
# 'development' when FLASK_CONFIG is unset.
config_name = os.environ.get('FLASK_CONFIG', 'development')

# Build the Flask application via the factory.
app = create_app(config_name)

if __name__ == "__main__":
    # Host/port come from the environment with sensible fallbacks.
    host = os.environ.get('FLASK_RUN_HOST', '0.0.0.0')
    try:
        port = int(os.environ.get('FLASK_RUN_PORT', '5000'))
    except ValueError:
        port = 5000

    # Development server only. Debug mode is governed by the loaded
    # configuration: app.run() picks up app.config['DEBUG'] automatically.
    print(f"Starting Flask server on {host}:{port} with config '{config_name}'...")
    app.run(host=host, port=port)

# In production, run under a WSGI server instead:
# Example: gunicorn -w 4 -b 0.0.0.0:5000 "run:create_app('production')"
||||
51
backend_flask/test_auth_init.py
Normal file
51
backend_flask/test_auth_init.py
Normal file
@ -0,0 +1,51 @@
|
||||
# backend/test_auth_init.py
# Purpose: Directly test the core logic of myapp/auth/__init__.py

import sys
import os
import traceback

print("--- Starting test_auth_init.py ---")

# --- Setup Path ---
# backend/ directory holding this script, and the project root one level up.
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(current_dir)
# Put the project root on sys.path so 'backend.myapp.auth'-style imports
# (and Blueprint resolution) work regardless of the invocation directory.
if project_root not in sys.path:
    sys.path.insert(0, project_root)
    print(f"Test script added project root: {project_root}")
print(f"Test script current sys.path: {sys.path}")
print(f"Test script current working directory: {os.getcwd()}")

# --- Test Core Logic ---
bp_instance = None  # stays None if any step below fails
try:
    print("\nAttempting: from flask import Blueprint")
    from flask import Blueprint
    print("Successfully imported Blueprint")

    print("\nAttempting: bp = Blueprint('auth', __name__, url_prefix='/api/auth')")
    # Deliberately use a distinct local name to rule out conflicts on 'bp'.
    test_bp = Blueprint('auth', __name__, url_prefix='/api/auth')
    bp_instance = test_bp
    print(f"Successfully instantiated Blueprint: {test_bp}")
    print(f"Type of test_bp: {type(test_bp)}")

except ImportError as e:
    print(f"\nERROR during import: {e}")
    traceback.print_exc()
except Exception as e:
    print(f"\nUNEXPECTED ERROR during instantiation: {e}")
    traceback.print_exc()

# --- Final Check ---
print("\n--- Final Check ---")
if bp_instance is not None:
    print(f"Variable 'bp_instance' was assigned successfully: {bp_instance}")
else:
    print("Variable 'bp_instance' was NOT assigned (likely due to error above).")

print("--- Finished test_auth_init.py ---")
|
||||
0
frontend_react/.Rhistory
Normal file
0
frontend_react/.Rhistory
Normal file
24
frontend_react/.gitignore
vendored
Normal file
24
frontend_react/.gitignore
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
12
frontend_react/README.md
Normal file
12
frontend_react/README.md
Normal file
@ -0,0 +1,12 @@
|
||||
# React + Vite
|
||||
|
||||
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||
|
||||
Currently, two official plugins are available:
|
||||
|
||||
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh
|
||||
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
|
||||
|
||||
## Expanding the ESLint configuration
|
||||
|
||||
If you are developing a production application, we recommend using TypeScript and enable type-aware lint rules. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project.
|
||||
33
frontend_react/eslint.config.js
Normal file
33
frontend_react/eslint.config.js
Normal file
@ -0,0 +1,33 @@
|
||||
import js from '@eslint/js'
|
||||
import globals from 'globals'
|
||||
import reactHooks from 'eslint-plugin-react-hooks'
|
||||
import reactRefresh from 'eslint-plugin-react-refresh'
|
||||
|
||||
export default [
|
||||
{ ignores: ['dist'] },
|
||||
{
|
||||
files: ['**/*.{js,jsx}'],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2020,
|
||||
globals: globals.browser,
|
||||
parserOptions: {
|
||||
ecmaVersion: 'latest',
|
||||
ecmaFeatures: { jsx: true },
|
||||
sourceType: 'module',
|
||||
},
|
||||
},
|
||||
plugins: {
|
||||
'react-hooks': reactHooks,
|
||||
'react-refresh': reactRefresh,
|
||||
},
|
||||
rules: {
|
||||
...js.configs.recommended.rules,
|
||||
...reactHooks.configs.recommended.rules,
|
||||
'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }],
|
||||
'react-refresh/only-export-components': [
|
||||
'warn',
|
||||
{ allowConstantExport: true },
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
13
frontend_react/index.html
Normal file
13
frontend_react/index.html
Normal file
@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Vite + React</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.jsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
2825
frontend_react/package-lock.json
generated
Normal file
2825
frontend_react/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
31
frontend_react/package.json
Normal file
31
frontend_react/package.json
Normal file
@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "surfsmart_react",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
"@dnd-kit/sortable": "^10.0.0",
|
||||
"dnd-kit": "^0.0.2",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"react-icons": "^5.5.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.21.0",
|
||||
"@types/react": "^19.0.10",
|
||||
"@types/react-dom": "^19.0.4",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"eslint": "^9.21.0",
|
||||
"eslint-plugin-react-hooks": "^5.1.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.19",
|
||||
"globals": "^15.15.0",
|
||||
"vite": "^6.2.0"
|
||||
}
|
||||
}
|
||||
1
frontend_react/public/vite.svg
Normal file
1
frontend_react/public/vite.svg
Normal file
@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
90
frontend_react/src/App.jsx
Normal file
90
frontend_react/src/App.jsx
Normal file
@ -0,0 +1,90 @@
|
||||
// frontend/src/App.jsx
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import LeftSidebar from './components/LeftSidebar/LeftSidebar.jsx';
|
||||
import MainContent from './components/MainContent/MainContent.jsx';
|
||||
import LoginPage from './components/LoginPage/LoginPage.jsx';
|
||||
import styles from './App.module.css';
|
||||
import { getAuthToken } from './services/api'; // Assuming fetchProjects is not needed here directly
|
||||
|
||||
/**
|
||||
* App Component
|
||||
*
|
||||
* Manages authentication state AND the currently selected project ID.
|
||||
* Renders either LoginPage or the main layout.
|
||||
*/
|
||||
function App() {
|
||||
const [isLoggedIn, setIsLoggedIn] = useState(false);
|
||||
const [authChecked, setAuthChecked] = useState(false);
|
||||
// --- State for selected project ID ---
|
||||
const [currentProjectId, setCurrentProjectId] = useState(null); // Initialize to null
|
||||
|
||||
useEffect(() => {
|
||||
const token = getAuthToken();
|
||||
if (token) {
|
||||
setIsLoggedIn(true);
|
||||
// If logged in, we might want to fetch projects and set an initial ID,
|
||||
// but that logic is currently in LeftSidebar. We'll let LeftSidebar
|
||||
// trigger the initial selection via the callback for now.
|
||||
} else {
|
||||
// Ensure currentProjectId is reset if no token found
|
||||
setCurrentProjectId(null);
|
||||
}
|
||||
setAuthChecked(true);
|
||||
}, []);
|
||||
|
||||
const handleLoginSuccess = () => {
|
||||
setIsLoggedIn(true);
|
||||
// Reset project ID on new login, let LeftSidebar set the initial one
|
||||
setCurrentProjectId(null);
|
||||
};
|
||||
|
||||
const handleLogout = () => {
|
||||
localStorage.removeItem('authToken');
|
||||
setIsLoggedIn(false);
|
||||
setCurrentProjectId(null); // Reset project ID on logout
|
||||
console.log('User logged out.');
|
||||
};
|
||||
|
||||
// --- Handler function to be passed to LeftSidebar ---
|
||||
const handleProjectSelect = (projectId) => {
|
||||
console.log("App: Project selected:", projectId);
|
||||
setCurrentProjectId(projectId);
|
||||
};
|
||||
// --- End handler ---
|
||||
|
||||
console.log('Render - isLoggedIn state:', isLoggedIn);
|
||||
console.log('Render - currentProjectId state:', currentProjectId);
|
||||
|
||||
|
||||
if (!authChecked) {
|
||||
return <div>Loading Authentication...</div>; // Or a loading spinner
|
||||
}
|
||||
|
||||
const containerClassName = isLoggedIn
|
||||
? `${styles.appContainer} ${styles.loggedInLayout}`
|
||||
: styles.appContainer;
|
||||
|
||||
console.log('Applied className:', containerClassName);
|
||||
|
||||
return (
|
||||
<div className={containerClassName}>
|
||||
{isLoggedIn ? (
|
||||
<>
|
||||
{/* Pass down currentProjectId and the selection handler */}
|
||||
<LeftSidebar
|
||||
onLogout={handleLogout}
|
||||
onProjectSelect={handleProjectSelect}
|
||||
currentProjectId={currentProjectId} // Pass current ID for highlighting
|
||||
/>
|
||||
{/* Pass down currentProjectId */}
|
||||
<MainContent currentProjectId={currentProjectId} />
|
||||
{/* Blank columns handled by CSS Grid */}
|
||||
</>
|
||||
) : (
|
||||
<LoginPage onLoginSuccess={handleLoginSuccess} />
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
150
frontend_react/src/App.module.css
Normal file
150
frontend_react/src/App.module.css
Normal file
@ -0,0 +1,150 @@
|
||||
/* App.module.css */

/* Define reusable variables globally within this module scope or use :global(:root) */
:global(:root) {
  /* Color Palette */
  --primary-color: #b2e3b6; /* soft light green */
  --primary-hover-color: #9fd4a6; /* slightly darker on hover */
  --primary-active-color: #89c897; /* darker still when pressed */
  --secondary-color: #a9b9ac; /* neutral grey-green */
  --secondary-hover-color: #95a89b;
  --accent-color: #76c28f; /* accent color, a bit more saturated */
  --accent-hover-color: #5bab74;

  --ai-background: #799fff91;
  --ai-background-hover: #627cca75;
  --ai-background-activate: #4063cc7c;
  --ai-text: #d40000;
  --ai-text-hover: #7e2525;
  --ai-text-activate: #641313;

  --success-color: #6fbf73; /* success hint, gentle green */
  --danger-color: #dc6b6b; /* warnings/errors keep red, but softened */
  --warning-color: #e6c87f; /* softened yellow hint */
  --light-color: #f3f8f4; /* light green background instead of pure white */
  --white-color: #ffffff;
  --dark-color: #2e3d31; /* dark, but not pure black */
  --text-color-primary: #1d2b21; /* primary text, dark grey-green */
  --text-color-secondary: #5c6e5f; /* secondary text, light grey-green */
  --text-color-light: #3a4b3f; /* text used on filled/inverted backgrounds */

  --border-color: #cbd5cb;
  --border-radius-sm: 0.25rem;
  --border-radius-md: 0.375rem;
  --border-radius-lg: 0.5rem;

  /* Background Arc Colors (coordinated translucent greens) */
  --arc-color-1: rgba(183, 228, 184, 0.25); /* light green */
  --arc-color-2: rgba(169, 209, 174, 0.2); /* grey-green */
  --arc-color-3: rgba(202, 235, 210, 0.3); /* white-green */

  /* Shadows */
  --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.03);
  --shadow-md: 0 3px 6px rgba(0, 0, 0, 0.06);
  --shadow-lg: 0 10px 20px rgba(0, 0, 0, 0.08);

  /* Transitions */
  --transition-fast: all 0.15s ease-in-out;
  --transition-base: all 0.2s ease-in-out;

  /* Spacing */
  --spacing-xs: 4px;
  --spacing-sm: 8px;
  --spacing-md: 16px;
  --spacing-lg: 24px;
  --spacing-xl: 32px;
}


/* Base styles for the app container */
.appContainer {
  min-height: 100vh;
  /* background-color: var(--light-color); */ /* Background now handled by ::before */
  width: 100%;
  box-sizing: border-box;
  position: relative; /* Needed for z-index stacking context if ::before uses absolute */
  z-index: 1; /* Ensure content is above the ::before pseudo-element */
}

/* --- Fixed Background with Arcs using ::before --- */
.appContainer::before {
  content: '';
  position: fixed; /* Fixed relative to viewport */
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  z-index: -1; /* Place behind the content */
  background-color: var(--light-color); /* Base background color */

  /* --- SVG Background Image --- */
  /* Generated using SVG data URI. You can create more complex SVGs. */
  /* This example creates three large arcs from corners/edges */
  background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100' preserveAspectRatio='none'%3E%3C!-- Arc 1: Top Left --%3E%3Cpath d='M 0,0 L 0,50 A 50 50 0 0 1 50,0 Z' fill='rgba(174, 203, 255, 0.3)' /%3E%3C!-- Arc 2: Bottom Right --%3E%3Cpath d='M 100,100 L 50,100 A 50 50 0 0 1 100,50 Z' fill='rgba(255, 193, 174, 0.2)' /%3E%3C!-- Arc 3: Bottom Left --%3E%3Cpath d='M 0,100 L 0,70 A 80 80 0 0 0 70,100 Z' fill='rgba(167, 255, 174, 0.25)' /%3E%3C/svg%3E");

  background-repeat: no-repeat;
  /* Adjust background size and position as needed */
  /* 'cover' might distort arcs, 'contain' might leave gaps */
  /* Using fixed size/positioning might be better */
  background-size: 100% 100%; /* Stretch SVG to container */
  /* Or position specific SVGs: */
  /* background-position: top left, bottom right, bottom left; */
  /* background-size: 50% auto, 50% auto, 80% auto; */
}


/* --- Styles applied ONLY when logged in (Grid Layout) --- */
.appContainer.loggedInLayout {
  display: grid;
  align-items: stretch;
  grid-template-columns: 2fr 2fr 6fr 2fr; /* Default large screen */
  /* Ensure grid layout itself doesn't have conflicting background */
  background-color: transparent; /* Make grid container transparent */
}

/* Assign components to specific grid columns ONLY when logged in */
/* Make sure children have backgrounds so fixed background doesn't show through */
.appContainer.loggedInLayout > :nth-child(1) { /* LeftSidebar */
  grid-column: 1 / 2;
  min-height: 100vh;
  background-color: var(--white-color); /* Give sidebar a background */
  z-index: 2; /* Ensure sidebar is above background */
}

.appContainer.loggedInLayout > :nth-child(2) { /* MainContent */
  grid-column: 3 / 4;
  min-height: 100vh;
  overflow-y: auto;
  padding: var(--spacing-lg);
  box-sizing: border-box;
  background-color: transparent; /* Let appContainer::before show through blank columns */
  z-index: 2; /* Ensure content is above background */
}


/* --- Responsive Breakpoints for the LOGGED-IN layout --- */
@media (max-width: 1200px) and (min-width: 1000px) {
  .appContainer.loggedInLayout {
    grid-template-columns: 2fr 2fr 8fr;
  }
  .appContainer.loggedInLayout > :nth-child(1) { grid-column: 1 / 2; }
  .appContainer.loggedInLayout > :nth-child(2) { grid-column: 3 / 4; }
}
@media (max-width: 1000px) and (min-width: 768px) {
  .appContainer.loggedInLayout {
    grid-template-columns: 2fr 10fr;
  }
  .appContainer.loggedInLayout > :nth-child(1) { grid-column: 1 / 2; }
  .appContainer.loggedInLayout > :nth-child(2) { grid-column: 2 / 3; }
}
@media (max-width: 768px) {
  .appContainer.loggedInLayout {
    display: block; /* Revert to block for mobile when logged in */
  }
  .appContainer.loggedInLayout > :nth-child(1) { grid-column: auto; }
  .appContainer.loggedInLayout > :nth-child(2) { grid-column: auto; padding: var(--spacing-md); }
}

/* --- End Logged-in Styles --- */
||||
183
frontend_react/src/components/LeftSidebar/LeftSidebar.jsx
Normal file
183
frontend_react/src/components/LeftSidebar/LeftSidebar.jsx
Normal file
@ -0,0 +1,183 @@
|
||||
// frontend/src/components/LeftSidebar/LeftSidebar.jsx
|
||||
import React, { useState, useEffect } from 'react';
|
||||
import styles from './LeftSidebar.module.css';
|
||||
// Import createProject API function
|
||||
import { fetchProjects, createProject } from '../../services/api';
|
||||
import { FaGithub, FaCog, FaPlus, FaUserCircle, FaSignOutAlt } from 'react-icons/fa';
|
||||
|
||||
/**
|
||||
* LeftSidebar Component
|
||||
* Fetches projects, displays them, handles creating new projects,
|
||||
* and calls onProjectSelect when one is clicked.
|
||||
* Highlights the selected project based on currentProjectId prop.
|
||||
*/
|
||||
function LeftSidebar({ onLogout, onProjectSelect, currentProjectId }) {
|
||||
const [projects, setProjects] = useState([]);
|
||||
const [isLoading, setIsLoading] = useState(true); // Loading state for initial fetch
|
||||
const [isCreating, setIsCreating] = useState(false); // Loading state for creating project
|
||||
const [error, setError] = useState(null);
|
||||
const [username, setUsername] = useState('Gellar'); // Placeholder
|
||||
|
||||
useEffect(() => {
|
||||
// Placeholder: fetch or get username from context/auth state
|
||||
// setUsername(fetchedUsername);
|
||||
}, []);
|
||||
|
||||
// Function to fetch projects, reusable
|
||||
const loadProjects = async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const data = await fetchProjects();
|
||||
console.log("LeftSidebar: Fetched projects data:", data);
|
||||
setProjects(data || []);
|
||||
return data || []; // Return fetched data
|
||||
} catch (err) {
|
||||
if (err.message === "Authentication failed. Please log in again.") {
|
||||
setError('Authentication error. Please log in.');
|
||||
} else {
|
||||
console.error("LeftSidebar: Failed to fetch projects:", err);
|
||||
setError('Failed to load projects.');
|
||||
}
|
||||
setProjects([]); // Clear projects on error
|
||||
if(onProjectSelect) {
|
||||
onProjectSelect(null); // Clear selection in App on error
|
||||
}
|
||||
return []; // Return empty array on error
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Initial load and setting initial selection
|
||||
useEffect(() => {
|
||||
loadProjects().then(initialProjects => {
|
||||
// Set initial project selection if none is selected yet
|
||||
if (initialProjects && initialProjects.length > 0 && currentProjectId === null && onProjectSelect) {
|
||||
console.log("LeftSidebar: Setting initial project:", initialProjects[0].id);
|
||||
onProjectSelect(initialProjects[0].id);
|
||||
} else if ((!initialProjects || initialProjects.length === 0) && onProjectSelect) {
|
||||
onProjectSelect(null);
|
||||
}
|
||||
});
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [onProjectSelect]); // Run only once on mount conceptually (onProjectSelect should be stable)
|
||||
|
||||
|
||||
const handleSelectProject = (projectId) => {
|
||||
if (onProjectSelect) {
|
||||
onProjectSelect(projectId);
|
||||
}
|
||||
console.log("LeftSidebar: Selected project:", projectId);
|
||||
};
|
||||
|
||||
// --- Updated handleNewProject ---
|
||||
const handleNewProject = async () => {
|
||||
console.log("Create new project clicked");
|
||||
const name = prompt("Enter new project name:"); // Get name from user
|
||||
|
||||
if (name && name.trim() !== '') {
|
||||
setIsCreating(true); // Set loading state for creation
|
||||
setError(null); // Clear previous errors
|
||||
try {
|
||||
// Call the createProject API function
|
||||
const newProjectData = await createProject({ name: name.trim() });
|
||||
console.log("LeftSidebar: Project created successfully:", newProjectData);
|
||||
|
||||
// Refresh the project list to include the new one
|
||||
await loadProjects(); // Reuse the fetching logic
|
||||
|
||||
// Automatically select the newly created project
|
||||
// Ensure newProjectData.id exists (mapped in api.js)
|
||||
if (newProjectData && newProjectData.id && onProjectSelect) {
|
||||
console.log("LeftSidebar: Selecting newly created project:", newProjectData.id);
|
||||
onProjectSelect(newProjectData.id);
|
||||
} else {
|
||||
console.warn("LeftSidebar: Could not get ID of newly created project to select it.");
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error("LeftSidebar: Failed to create project:", error);
|
||||
setError(`Error creating project: ${error.message}`); // Show error specific to creation
|
||||
alert(`Error creating project: ${error.message}`); // Also show alert
|
||||
} finally {
|
||||
setIsCreating(false); // Reset loading state
|
||||
}
|
||||
} else if (name !== null) { // Only show alert if prompt wasn't cancelled
|
||||
alert("Project name cannot be empty.");
|
||||
}
|
||||
};
|
||||
// --- End updated handleNewProject ---
|
||||
|
||||
const handleLogoutClick = () => {
|
||||
if (onLogout && typeof onLogout === 'function') {
|
||||
onLogout();
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={styles.sidebar}>
|
||||
<div className={styles.logoSection}>
|
||||
<span className={styles.logo}>Icon</span>
|
||||
<span className={styles.appName}>SurfSmart</span>
|
||||
</div>
|
||||
|
||||
{/* Disable button while creating */}
|
||||
<button
|
||||
className={styles.newProjectButton}
|
||||
onClick={handleNewProject}
|
||||
disabled={isCreating || isLoading} // Disable if loading initial list or creating
|
||||
>
|
||||
{isCreating ? 'Creating...' : 'NEW'}
|
||||
{!isCreating && <FaPlus className={styles.newProjectIcon} />}
|
||||
</button>
|
||||
|
||||
{/* Display creation error */}
|
||||
{error && !isLoading && <p className={styles.error}>{error}</p>}
|
||||
|
||||
<nav className={styles.projectList}>
|
||||
{isLoading && <p>Loading projects...</p>}
|
||||
{!isLoading && !error && projects.map(project => (
|
||||
<a
|
||||
key={project.id}
|
||||
href="#"
|
||||
className={`${styles.projectItem} ${project.id === currentProjectId ? styles.selected : ''}`}
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
// Prevent selection change while creating a new project
|
||||
if (!isCreating) {
|
||||
handleSelectProject(project.id);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{project.name}
|
||||
</a>
|
||||
))}
|
||||
{!isLoading && !error && projects.length === 0 && (
|
||||
<p className={styles.noProjects}>No projects yet. Click NEW!</p>
|
||||
)}
|
||||
</nav>
|
||||
|
||||
<div className={styles.bottomSection}>
|
||||
<div className={styles.accountInfoCapsule} title={`Logged in as ${username}`}>
|
||||
<FaUserCircle className={styles.avatarPlaceholder} />
|
||||
<span className={styles.usernameDisplay}>{username}</span>
|
||||
</div>
|
||||
<div className={styles.actionIcons}>
|
||||
<a href="https://github.com" target="_blank" rel="noopener noreferrer" className={styles.iconLink} title="GitHub">
|
||||
<FaGithub />
|
||||
</a>
|
||||
<a href="#" className={styles.iconLink} title="Settings">
|
||||
<FaCog />
|
||||
</a>
|
||||
<button onClick={handleLogoutClick} className={`${styles.iconLink} ${styles.logoutButton}`} title="Logout">
|
||||
<FaSignOutAlt />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default LeftSidebar;
|
||||
213
frontend_react/src/components/LeftSidebar/LeftSidebar.module.css
Normal file
213
frontend_react/src/components/LeftSidebar/LeftSidebar.module.css
Normal file
@ -0,0 +1,213 @@
|
||||
/* components/LeftSidebar/LeftSidebar.module.css */
.sidebar {
  /* NOTE(review): --background-color is not defined in App.module.css's
     :root variables (only --white-color / --light-color are) — confirm the
     intended variable; the inline comment says "White background". */
  background-color: var(--background-color); /* White background */
  /* padding: 20px; */ /* Use variable */
  padding: var(--spacing-lg) var(--spacing-md); /* Adjust padding */
  display: flex;
  flex-direction: column;
  border-right: 1px solid var(--border-color); /* Use variable */
  box-shadow: var(--shadow-sm); /* Subtle shadow */
  box-sizing: border-box;
  grid-row: 1 / -1; /* Span the full grid height when inside the app grid */
  transition: var(--transition-base); /* Add transition for potential future changes like collapse */
}

.logoSection {
  margin-bottom: var(--spacing-xl); /* Use variable */
  display: flex;
  align-items: center;
  padding-left: var(--spacing-xs); /* Align with project items */
}

.logo {
  font-weight: bold;
  margin-right: var(--spacing-sm);
  /* Placeholder style until a real logo asset exists */
  border: 1px solid #ccc;
  padding: 5px 10px;
  border-radius: var(--border-radius-sm);
}

.appName {
  font-size: 1.2em;
  font-weight: 600; /* Slightly bolder */
  color: var(--text-color-primary);
}

.newProjectButton {
  background-color: var(--primary-color);
  color: var(--text-color-light);
  border: none;
  padding: 10px 15px;
  border-radius: var(--border-radius-md); /* Use variable */
  cursor: pointer;
  font-size: 1em;
  margin-bottom: var(--spacing-lg); /* Use variable */
  display: flex;
  align-items: center;
  justify-content: center;
  transition: var(--transition-base); /* Use variable */
  box-shadow: var(--shadow-sm);
}

.newProjectButton:hover {
  background-color: var(--primary-hover-color);
  box-shadow: var(--shadow-md);
  transform: translateY(-1px); /* Subtle lift */
}

.newProjectButton:active {
  background-color: var(--primary-active-color);
  transform: translateY(0px);
  box-shadow: none;
}


.newProjectIcon {
  margin-left: var(--spacing-sm);
}

.projectList {
  flex-grow: 1; /* Fill remaining sidebar height */
  overflow-y: auto;
}

.projectItem {
  display: block;
  padding: var(--spacing-sm) var(--spacing-md); /* Adjust padding */
  margin-bottom: var(--spacing-xs); /* Use variable */
  text-decoration: none;
  color: var(--text-color-secondary); /* Use variable */
  border-radius: var(--border-radius-md); /* Use variable */
  transition: var(--transition-fast); /* Use variable */
  font-weight: 500;
  position: relative; /* For the ::before selected-indicator bar */
}

.projectItem:hover {
  background-color: var(--light-color); /* Use variable */
  color: var(--text-color-primary);
}

.projectItem.selected {
  background-color: var(--primary-color); /* Use variable */
  font-weight: 600;
  color: var(--text-color-light); /* Use variable */
  box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1); /* Inner shadow for selected */
}

/* Small accent indicator bar on the left edge of the selected item */
.projectItem.selected::before {
  content: '';
  position: absolute;
  left: 0;
  top: 5px;
  bottom: 5px;
  width: 3px;
  background-color: var(--accent-color);
  border-radius: 0 3px 3px 0;
}


.noProjects {
  color: var(--text-color-secondary);
  font-style: italic;
  padding: var(--spacing-sm) var(--spacing-md);
}

.bottomSection {
  margin-top: auto; /* Pin to the bottom of the flex column */
  padding-top: var(--spacing-md); /* Use variable */
  border-top: 1px solid var(--border-color); /* Use variable */
  display: flex;
  align-items: center;
  justify-content: space-between;
  gap: var(--spacing-sm); /* Use variable */
}

/* --- Styles for Account Info --- */
.accountInfoCapsule {
  display: flex;
  align-items: center;
  background-color: transparent; /* Make transparent */
  border: 1px solid var(--border-color); /* Add border */
  border-radius: 20px;
  padding: var(--spacing-xs) var(--spacing-sm); /* Adjust padding */
  cursor: default;
  transition: var(--transition-fast);
  flex-shrink: 1;
  min-width: 0; /* Allow the username to shrink/ellipsize */
  overflow: hidden;
}

.accountInfoCapsule:hover {
  background-color: var(--light-color); /* Light bg on hover */
  border-color: #bbb; /* Slightly darker border */
}

.avatarPlaceholder {
  font-size: 1.4em;
  color: var(--text-color-secondary);
  margin-right: var(--spacing-sm);
  flex-shrink: 0;
}

.usernameDisplay {
  font-size: 0.9em;
  font-weight: 500;
  color: var(--text-color-primary);
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis; /* Truncate long usernames */
}
/* --- End Account Info Styles --- */

.actionIcons {
  display: flex;
  align-items: center;
  gap: var(--spacing-md); /* Increase gap slightly */
  flex-shrink: 0;
}

.iconLink {
  color: var(--text-color-secondary);
  font-size: 1.2em; /* Slightly smaller icons */
  text-decoration: none;
  transition: var(--transition-fast);
  display: flex;
  align-items: center;
  padding: var(--spacing-xs); /* Add padding for easier click */
  border-radius: 50%; /* Make icon background circular on hover */
}

.iconLink:hover {
  color: var(--text-color-primary);
  background-color: var(--light-color); /* Add background on hover */
}

/* --- Logout Button Style --- */
.logoutButton {
  background: none;
  border: none;
  padding: var(--spacing-xs); /* Match iconLink padding */
  margin: 0;
  cursor: pointer;
  color: var(--danger-color); /* Use variable */
  font-size: 1.2em; /* Match iconLink size */
  display: flex;
  align-items: center;
  border-radius: 50%; /* Match iconLink radius */
  transition: var(--transition-fast);
}

.logoutButton:hover {
  color: var(--text-color-light);
  background-color: var(--danger-color); /* Red background on hover */
}
/* --- End Logout Button Style --- */

.error {
  color: var(--danger-color);
  font-size: 0.9em;
  padding: 0 var(--spacing-md); /* Add padding */
}
|
||||
156
frontend_react/src/components/LoginPage/LoginPage.jsx
Normal file
156
frontend_react/src/components/LoginPage/LoginPage.jsx
Normal file
@ -0,0 +1,156 @@
|
||||
import React, { useState } from 'react';
|
||||
import { loginUser } from '../../services/api'; // Import the login API function
|
||||
import styles from './LoginPage.module.css';
|
||||
// Optional: Import an icon library, e.g., react-icons
|
||||
import { FaEye, FaEyeSlash } from 'react-icons/fa'; // Example using Font Awesome icons
|
||||
|
||||
/**
|
||||
* LoginPage Component
|
||||
*
|
||||
* Provides a form for users to log in using username and password.
|
||||
* Handles input, submission, API calls, token storage, and error display.
|
||||
*/
|
||||
function LoginPage({ onLoginSuccess }) {
|
||||
const [username, setUsername] = useState('');
|
||||
const [password, setPassword] = useState('');
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState(null);
|
||||
const [showPassword, setShowPassword] = useState(false);
|
||||
|
||||
const handleSubmit = async (event) => {
|
||||
event.preventDefault();
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const response = await loginUser(username, password);
|
||||
|
||||
// --- DEBUGGING LOGS ---
|
||||
console.log('Login API response received:', response); // Log the entire response object
|
||||
// --- END DEBUGGING LOGS ---
|
||||
|
||||
if (response && response.token) { // Added check for response object itself
|
||||
// --- DEBUGGING LOGS ---
|
||||
console.log('Token found in response, attempting to store:', response.token);
|
||||
// --- END DEBUGGING LOGS ---
|
||||
|
||||
localStorage.setItem('authToken', response.token);
|
||||
|
||||
// --- DEBUGGING LOGS ---
|
||||
// Verify immediately after setting
|
||||
const storedToken = localStorage.getItem('authToken');
|
||||
console.log('Token potentially stored. Value in localStorage:', storedToken);
|
||||
if (storedToken !== response.token) {
|
||||
console.error("!!! Token mismatch after setting in localStorage !!!");
|
||||
}
|
||||
// --- END DEBUGGING LOGS ---
|
||||
|
||||
|
||||
console.log('Login successful, proceeding...');
|
||||
if (onLoginSuccess) {
|
||||
onLoginSuccess();
|
||||
} else {
|
||||
window.location.reload();
|
||||
}
|
||||
} else {
|
||||
// --- DEBUGGING LOGS ---
|
||||
console.log('No token found in API response object.');
|
||||
// --- END DEBUGGING LOGS ---
|
||||
setError('Login failed: No token received from server.'); // Updated error message
|
||||
}
|
||||
} catch (err) {
|
||||
setError(err.message || 'Login failed. Please check your credentials.');
|
||||
console.error("Login error object:", err); // Log the full error object
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={styles.loginContainer}>
|
||||
<div className={styles.loginBox}>
|
||||
<h1 className={styles.title}>Login</h1>
|
||||
<p className={styles.subtitle}>Access your SurfSmart dashboard</p>
|
||||
|
||||
<form onSubmit={handleSubmit}>
|
||||
{/* --- Username Input Group --- */}
|
||||
<div className={styles.inputGroup}>
|
||||
<label htmlFor="username">Username</label>
|
||||
<div className={styles.inputWrapper}>
|
||||
<input
|
||||
type="text"
|
||||
id="username"
|
||||
value={username}
|
||||
onChange={(e) => setUsername(e.target.value)}
|
||||
required
|
||||
placeholder="Enter your username"
|
||||
disabled={isLoading}
|
||||
autoComplete="username"
|
||||
aria-invalid={error ? "true" : "false"}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* --- Password Input Group --- */}
|
||||
<div className={styles.inputGroup}>
|
||||
<label htmlFor="password">Password</label>
|
||||
<div className={styles.inputWrapper}>
|
||||
<input
|
||||
type={showPassword ? 'text' : 'password'}
|
||||
id="password"
|
||||
value={password}
|
||||
onChange={(e) => setPassword(e.target.value)}
|
||||
required
|
||||
placeholder="Enter your password"
|
||||
disabled={isLoading}
|
||||
autoComplete="current-password"
|
||||
aria-invalid={error ? "true" : "false"}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setShowPassword(!showPassword)}
|
||||
className={styles.passwordToggle}
|
||||
aria-label={showPassword ? "Hide password" : "Show password"}
|
||||
disabled={isLoading}
|
||||
title={showPassword ? "Hide password" : "Show password"}
|
||||
>
|
||||
{showPassword ? <FaEyeSlash size={18} /> : <FaEye size={18} />}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* --- Error Message Display Area --- */}
|
||||
{error && (
|
||||
<div role="alert" className={styles.errorMessage}>
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* --- Login Button --- */}
|
||||
<button
|
||||
type="submit"
|
||||
className={`${styles.loginButton} ${isLoading ? styles.loading : ''}`}
|
||||
disabled={isLoading}
|
||||
>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<span className={styles.spinner}></span>
|
||||
<span className={styles.buttonText}>Logging in...</span>
|
||||
</>
|
||||
) : (
|
||||
<span className={styles.buttonText}>Login</span>
|
||||
)}
|
||||
</button>
|
||||
|
||||
{/* --- Optional Links: Register or Forgot Password --- */}
|
||||
<div className={styles.links}>
|
||||
<a href="/register">Don't have an account? Sign Up</a>
|
||||
<a href="/forgot-password">Forgot Password?</a>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default LoginPage;
|
||||
281
frontend_react/src/components/LoginPage/LoginPage.module.css
Normal file
281
frontend_react/src/components/LoginPage/LoginPage.module.css
Normal file
@ -0,0 +1,281 @@
|
||||
/* components/LoginPage/LoginPage.module.css */

/* Component-scoped theme variables.
   NOTE(review): declared via :global(:root) from a CSS module, so these leak
   into the global scope on purpose — consider moving them to a shared global
   stylesheet so other modules (MainContent, ProjectHeader) stop re-declaring
   overlapping names. */
:global(:root) { /* Use :global if this is module CSS and you want to define global variables */
  --primary-color: #007bff; /* Primary theme color */
  --primary-hover-color: #0056b3; /* Primary hover color */
  --primary-active-color: #004085; /* Primary active color */
  --error-color: #dc3545; /* Error state color */
  --error-background-color: rgba(220, 53, 69, 0.08); /* Error background */
  --error-border-color: rgba(220, 53, 69, 0.2); /* Error border */
  --success-color: #28a745; /* Success state color */
  --input-border-color: #ced4da; /* Input border color */
  --input-focus-border-color: var(--primary-color); /* Input focus border */
  --input-focus-shadow: 0 0 0 3px rgba(0, 123, 255, 0.15); /* Input focus shadow */
  --text-color-primary: #212529; /* Primary text color */
  --text-color-secondary: #6c757d; /* Secondary text color */
  --text-color-button: #ffffff; /* Button text color */
  /* Colors for the animated background gradient */
  --gradient-color-1: #aecbff;
  --gradient-color-2: #ff7b7b;
  --gradient-color-3: #c8df66;
  --gradient-color-4: #0073ff;
  --background-card: #ffffff; /* Card background color */
  --font-family-base: system-ui, -apple-system, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, 'Noto Sans', 'Liberation Sans', sans-serif; /* Base font family */
  --border-radius-base: 6px; /* Base border radius */
  --transition-base: all 0.2s ease-in-out; /* Base transition */
}

/* Keyframes for the background gradient animation: slides the oversized
   gradient back and forth across the viewport. */
@keyframes gradientShift {
  0% { background-position: 0% 50%; }
  50% { background-position: 100% 50%; }
  100% { background-position: 0% 50%; }
}

/* Full-viewport wrapper that centers the login card over the animated gradient. */
.loginContainer {
  display: flex;
  justify-content: center;
  align-items: center;
  min-height: 100vh;
  /* Animated linear gradient background */
  background: linear-gradient(-45deg, var(--gradient-color-1), var(--gradient-color-2), var(--gradient-color-3), var(--gradient-color-4));
  background-size: 400% 400%; /* Make gradient larger than the container so it can shift */
  animation: gradientShift 15s ease infinite; /* Apply the animation */
  padding: 20px;
  font-family: var(--font-family-base); /* Apply base font */
  width: 100%;
  box-sizing: border-box;
}

/* The white login card itself. */
.loginBox {
  background-color: var(--background-card); /* Use variable */
  padding: 40px 35px; /* Slightly adjust padding */
  border-radius: var(--border-radius-base); /* Use variable */
  box-shadow: 0 6px 20px rgba(0, 0, 0, 0.08); /* Softer shadow */
  width: 100%;
  max-width: 420px; /* Slightly increase width */
  text-align: center;
  box-sizing: border-box;
  z-index: 1; /* Ensure login box is above the background */
}

/* Optional: Logo styles */
.logo {
  max-width: 150px;
  margin-bottom: 25px;
}

.title {
  margin-top: 0;
  margin-bottom: 10px;
  font-size: 2.2em; /* Increase title font size */
  font-weight: 600; /* Adjust font weight */
  color: var(--text-color-primary); /* Use variable */
}

.subtitle {
  margin-bottom: 35px; /* Increase bottom margin */
  color: var(--text-color-secondary); /* Use variable */
  font-size: 1.05em; /* Adjust font size */
}

/* One label + input (+ toggle button) group. */
.inputGroup {
  margin-bottom: 22px; /* Adjust margin */
  text-align: left;
  position: relative; /* Provide base for absolute positioning inside */
}

.inputGroup label {
  display: block;
  margin-bottom: 8px; /* Adjust margin below label */
  font-weight: 500;
  color: var(--text-color-primary); /* Use variable */
  font-size: 0.95em;
}

/* Wrapper for input and icon/button */
.inputWrapper {
  position: relative;
  display: flex; /* For aligning icon and input */
  align-items: center;
}

/* Optional: Styles for icon next to input */
.inputIcon {
  position: absolute;
  left: 12px;
  top: 50%;
  transform: translateY(-50%);
  color: var(--text-color-secondary);
  pointer-events: none; /* Prevent icon from interfering with clicks */
  z-index: 1; /* Ensure it's above input background */
}

.inputGroup input {
  width: 100%;
  padding: 12px 15px; /* Base padding */
  /* Increase left padding if using left icon */
  /* padding-left: 40px; */
  /* Right padding reserves space for the password-toggle button */
  padding-right: 45px;
  border: 1px solid var(--input-border-color); /* Use variable */
  border-radius: var(--border-radius-base); /* Use variable */
  font-size: 1em;
  box-sizing: border-box;
  color: var(--text-color-primary);
  background-color: var(--background-card);
  transition: var(--transition-base); /* Apply base transition */
}

.inputGroup input::placeholder {
  color: var(--text-color-secondary);
  opacity: 0.7;
}

.inputGroup input:focus {
  outline: none;
  border-color: var(--input-focus-border-color); /* Use variable */
  box-shadow: var(--input-focus-shadow); /* Use variable */
}

/* Style when input is invalid (driven by aria-invalid set in the JSX) */
.inputGroup input[aria-invalid="true"] {
  border-color: var(--error-color);
}
.inputGroup input[aria-invalid="true"]:focus {
  border-color: var(--error-color);
  box-shadow: 0 0 0 3px rgba(220, 53, 69, 0.15); /* Focus shadow for error state */
}


/* Password visibility toggle button styles */
.passwordToggle {
  position: absolute;
  right: 0px; /* Position to the right */
  top: 0;
  height: 100%; /* Same height as input */
  background: transparent;
  border: none;
  padding: 0 12px; /* Left/right padding */
  cursor: pointer;
  color: var(--text-color-secondary);
  display: flex;
  align-items: center;
  justify-content: center;
  transition: color 0.2s ease;
}

.passwordToggle:hover,
.passwordToggle:focus {
  color: var(--text-color-primary);
  outline: none; /* Remove default outline, rely on parent focus style */
}

.passwordToggle:disabled {
  cursor: not-allowed;
  opacity: 0.5;
}

/* Error message styles */
.errorMessage {
  color: var(--error-color); /* Use variable */
  background-color: var(--error-background-color); /* Use variable */
  border: 1px solid var(--error-border-color); /* Use variable */
  padding: 10px 15px;
  border-radius: var(--border-radius-base); /* Use variable */
  margin-top: 5px; /* Space between error and input */
  margin-bottom: 15px;
  font-size: 0.9em;
  text-align: left; /* Align error message left */
  display: flex; /* For aligning optional icon */
  align-items: center;
  transition: var(--transition-base); /* Apply base transition */
}

/* Optional: Error icon styles */
.errorIcon {
  margin-right: 8px;
  flex-shrink: 0; /* Prevent icon from shrinking */
}

/* Login button styles */
.loginButton {
  width: 100%;
  padding: 12px 20px;
  background-color: var(--primary-color); /* Use variable */
  color: var(--text-color-button); /* Use variable */
  border: none;
  border-radius: var(--border-radius-base); /* Use variable */
  font-size: 1.1em;
  font-weight: 600; /* Slightly bolder text */
  cursor: pointer;
  transition: var(--transition-base); /* Apply base transition */
  margin-top: 15px; /* Space above button */
  position: relative; /* For spinner positioning */
  overflow: hidden; /* Hide overflow if spinner is absolutely positioned */
  display: flex; /* Use flex to center content */
  align-items: center;
  justify-content: center;
  gap: 8px; /* Space between text and spinner */
}

.loginButton:hover {
  background-color: var(--primary-hover-color); /* Use variable */
}

.loginButton:active {
  background-color: var(--primary-active-color); /* Use variable */
  transform: translateY(1px); /* Subtle press effect */
}

.loginButton:disabled {
  background-color: #cccccc;
  cursor: not-allowed;
  opacity: 0.65; /* Adjust opacity for disabled state */
}

/* Hide text when loading, spinner will be shown */
.loginButton.loading .buttonText {
  /* Optional: uncomment to hide text when loading */
  /* display: none; */
}

/* Loading spinner styles */
.spinner {
  display: inline-block;
  width: 1em; /* Relative to font size */
  height: 1em; /* Relative to font size */
  border: 2px solid rgba(255, 255, 255, 0.3); /* Lighter border */
  border-radius: 50%;
  border-top-color: var(--text-color-button); /* Spinner color */
  animation: spin 1s ease-in-out infinite;
  vertical-align: middle; /* Align with text */
}

@keyframes spin {
  to { transform: rotate(360deg); }
}

/* Optional links styles */
.links {
  margin-top: 25px;
  font-size: 0.9em;
  line-height: 1.6; /* Add line-height for better spacing when stacked */
}

.links a {
  color: var(--primary-color); /* Use variable */
  text-decoration: none;
  /* Block display stacks the links vertically */
  display: block;
  /* Vertical-only margin adds space between stacked links */
  margin: 8px 0; /* Add some space between stacked links */
  transition: color 0.2s ease;
}

.links a:hover {
  text-decoration: underline;
  color: var(--primary-hover-color); /* Use variable */
}
|
||||
291
frontend_react/src/components/MainContent/MainContent.jsx
Normal file
291
frontend_react/src/components/MainContent/MainContent.jsx
Normal file
@ -0,0 +1,291 @@
|
||||
// frontend/src/components/MainContent/MainContent.jsx
|
||||
import React, { useState, useEffect, useRef } from 'react'; // Import useRef
|
||||
import ProjectHeader from '../ProjectHeader/ProjectHeader.jsx';
|
||||
import UrlCardList from '../UrlCardList/UrlCardList.jsx';
|
||||
import UrlDetailPage from '../UrlDetailPage/UrlDetailPage.jsx';
|
||||
import styles from './MainContent.module.css';
|
||||
import {
|
||||
fetchProjectDetails,
|
||||
fetchProjectUrls,
|
||||
addUrlToProject,
|
||||
askAiAboutProject,
|
||||
deleteUrlFromProject,
|
||||
regenerateSummary,
|
||||
fetchUrlDetails // Import fetchUrlDetails for polling
|
||||
} from '../../services/api';
|
||||
import { FaPlus, FaMagic } from 'react-icons/fa';
|
||||
|
||||
// --- Constants ---
|
||||
const POLLING_INTERVAL_MS = 5000; // Check every 5 seconds
|
||||
|
||||
function MainContent({ currentProjectId }) {
|
||||
const [projectDetails, setProjectDetails] = useState(null);
|
||||
const [urls, setUrls] = useState([]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState(null);
|
||||
const [aiResponse, setAiResponse] = useState('');
|
||||
const [detailUrlId, setDetailUrlId] = useState(null);
|
||||
|
||||
// --- Polling State ---
|
||||
// Use useRef to store the interval ID so it doesn't trigger re-renders
|
||||
const pollIntervalRef = useRef(null);
|
||||
// --- End Polling State ---
|
||||
|
||||
// Function to update a single URL in the state
|
||||
const updateSingleUrlState = (updatedUrlData) => {
|
||||
if (!updatedUrlData || !updatedUrlData.id) return;
|
||||
setUrls(currentUrls => {
|
||||
// Create a flag to see if an update actually happened
|
||||
let updated = false;
|
||||
const newUrls = currentUrls.map(url => {
|
||||
if (url.id === updatedUrlData.id) {
|
||||
// Only update if data has actually changed to avoid infinite loops
|
||||
// Compare relevant fields like status, title, summary
|
||||
if (url.processingStatus !== updatedUrlData.processingStatus ||
|
||||
url.title !== updatedUrlData.title ||
|
||||
url.summary !== updatedUrlData.summary) {
|
||||
updated = true;
|
||||
return { ...url, ...updatedUrlData, isLoading: false }; // Merge new data
|
||||
}
|
||||
}
|
||||
return url;
|
||||
});
|
||||
// Only set state if an update occurred
|
||||
return updated ? newUrls : currentUrls;
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// Effect for initial data load when project changes
|
||||
useEffect(() => {
|
||||
setDetailUrlId(null);
|
||||
// Clear any existing polling interval when project changes
|
||||
if (pollIntervalRef.current) {
|
||||
clearInterval(pollIntervalRef.current);
|
||||
pollIntervalRef.current = null;
|
||||
}
|
||||
|
||||
if (!currentProjectId) {
|
||||
setProjectDetails(null); setUrls([]); setIsLoading(false); setError(null);
|
||||
return;
|
||||
};
|
||||
setIsLoading(true); setError(null); setAiResponse('');
|
||||
Promise.all([ fetchProjectDetails(currentProjectId), fetchProjectUrls(currentProjectId) ])
|
||||
.then(([details, urlsData]) => {
|
||||
setProjectDetails(details); setUrls(urlsData || []); setIsLoading(false);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error("MainContent: Failed to load project data:", err);
|
||||
// ... (error handling as before) ...
|
||||
setError(`Failed to load data for project ${currentProjectId}.`);
|
||||
setProjectDetails(null); setUrls([]); setIsLoading(false);
|
||||
});
|
||||
|
||||
// Cleanup function for when component unmounts or currentProjectId changes
|
||||
return () => {
|
||||
if (pollIntervalRef.current) {
|
||||
clearInterval(pollIntervalRef.current);
|
||||
pollIntervalRef.current = null;
|
||||
console.log("Polling interval cleared on project change/unmount.");
|
||||
}
|
||||
};
|
||||
}, [currentProjectId]);
|
||||
|
||||
|
||||
// --- Effect for Polling Pending URLs ---
|
||||
useEffect(() => {
|
||||
const pendingUrls = urls.filter(url => url.processingStatus === 'pending');
|
||||
|
||||
if (pendingUrls.length > 0 && !pollIntervalRef.current) {
|
||||
// Start polling only if there are pending URLs and polling isn't already running
|
||||
console.log(`Polling started for ${pendingUrls.length} pending URL(s).`);
|
||||
pollIntervalRef.current = setInterval(async () => {
|
||||
console.log("Polling: Checking status of pending URLs...");
|
||||
const currentPendingIds = urls
|
||||
.filter(u => u.processingStatus === 'pending')
|
||||
.map(u => u.id);
|
||||
|
||||
if (currentPendingIds.length === 0) {
|
||||
console.log("Polling: No more pending URLs, stopping interval.");
|
||||
clearInterval(pollIntervalRef.current);
|
||||
pollIntervalRef.current = null;
|
||||
return;
|
||||
}
|
||||
|
||||
// Fetch details for each pending URL
|
||||
// Using Promise.allSettled to handle individual fetch failures
|
||||
const results = await Promise.allSettled(
|
||||
currentPendingIds.map(id => fetchUrlDetails(id))
|
||||
);
|
||||
|
||||
let anyUpdates = false;
|
||||
results.forEach((result, index) => {
|
||||
const urlId = currentPendingIds[index];
|
||||
if (result.status === 'fulfilled') {
|
||||
const updatedData = result.value;
|
||||
// Check if the status is no longer pending
|
||||
if (updatedData && updatedData.processingStatus !== 'pending') {
|
||||
console.log(`Polling: URL ${urlId} status updated to ${updatedData.processingStatus}. Updating state.`);
|
||||
updateSingleUrlState(updatedData); // Update the specific URL in state
|
||||
anyUpdates = true;
|
||||
}
|
||||
} else {
|
||||
// Handle fetch error for a specific URL during polling
|
||||
console.error(`Polling: Failed to fetch details for URL ${urlId}:`, result.reason);
|
||||
// Optionally mark this URL as failed in the state if fetch fails consistently?
|
||||
// updateSingleUrlState({ id: urlId, processingStatus: 'failed', summary: 'Failed to fetch status.' });
|
||||
// anyUpdates = true;
|
||||
}
|
||||
});
|
||||
|
||||
// If all polled URLs are now completed/failed, stop the interval early
|
||||
// Check the main 'urls' state again after potential updates
|
||||
const stillPending = urls.some(u => u.processingStatus === 'pending');
|
||||
if (!stillPending && pollIntervalRef.current) {
|
||||
console.log("Polling: All polled URLs completed/failed, stopping interval.");
|
||||
clearInterval(pollIntervalRef.current);
|
||||
pollIntervalRef.current = null;
|
||||
}
|
||||
|
||||
|
||||
}, POLLING_INTERVAL_MS);
|
||||
|
||||
} else if (pendingUrls.length === 0 && pollIntervalRef.current) {
|
||||
// Stop polling if no pending URLs remain
|
||||
console.log("Polling stopped: No pending URLs.");
|
||||
clearInterval(pollIntervalRef.current);
|
||||
pollIntervalRef.current = null;
|
||||
}
|
||||
|
||||
// Cleanup function for this specific effect (when urls state changes)
|
||||
// This ensures the interval is cleared if the component unmounts while polling
|
||||
return () => {
|
||||
if (pollIntervalRef.current) {
|
||||
clearInterval(pollIntervalRef.current);
|
||||
// console.log("Polling interval cleared on effect cleanup.");
|
||||
// Setting ref to null here might cause issues if another effect relies on it immediately
|
||||
// It's better handled by the main cleanup in the projectId effect
|
||||
}
|
||||
};
|
||||
}, [urls]); // Re-run this effect whenever the urls state changes
|
||||
// --- End Polling Effect ---
|
||||
|
||||
|
||||
const handleViewUrlDetails = (urlId) => { setDetailUrlId(urlId); };
|
||||
const handleBackToList = () => { setDetailUrlId(null); };
|
||||
|
||||
const handleAddUrl = () => {
|
||||
// ... (implementation as before, setting initial status to 'pending') ...
|
||||
if (!currentProjectId) { alert("Please select a project first."); return; }
|
||||
let newUrl = prompt("Enter the new URL (e.g., https://example.com or example.com):");
|
||||
|
||||
if (newUrl && newUrl.trim() !== '') {
|
||||
let processedUrl = newUrl.trim();
|
||||
if (!/^(https?:\/\/|\/\/)/i.test(processedUrl)) {
|
||||
processedUrl = 'https://' + processedUrl;
|
||||
}
|
||||
|
||||
const placeholderId = `temp-${Date.now()}`;
|
||||
const placeholderCard = {
|
||||
id: placeholderId, url: processedUrl, title: '(Processing...)',
|
||||
summary: '', note: '', keywords: [], starred: false, favicon: null,
|
||||
processingStatus: 'pending',
|
||||
};
|
||||
setUrls(prevUrls => [placeholderCard, ...prevUrls]);
|
||||
|
||||
addUrlToProject(currentProjectId, processedUrl)
|
||||
.then(addedUrlData => {
|
||||
setUrls(prevUrls => prevUrls.map(url => {
|
||||
if (url.id === placeholderId) {
|
||||
return {
|
||||
id: addedUrlData.id, url: processedUrl, title: '', summary: '',
|
||||
note: '', keywords: [], starred: false, favicon: null,
|
||||
processingStatus: 'pending', // Set correct initial status
|
||||
createdAt: new Date().toISOString(), updatedAt: new Date().toISOString()
|
||||
};
|
||||
} else { return url; }
|
||||
}));
|
||||
})
|
||||
.catch(err => {
|
||||
console.error("Failed to add URL:", err);
|
||||
setUrls(prevUrls => prevUrls.filter(url => url.id !== placeholderId));
|
||||
alert(`Failed to add URL: ${err.message || 'An unknown error occurred.'}`);
|
||||
});
|
||||
} else if (newUrl !== null) { alert("URL cannot be empty."); }
|
||||
};
|
||||
|
||||
const handleAskAi = () => { /* ... */ };
|
||||
const handleUrlOrderChange = (newOrder) => { /* ... */ };
|
||||
const handleDeleteUrl = (urlIdToDelete) => { /* ... */ };
|
||||
const handleRegenerateSummary = (urlIdToRegen) => {
|
||||
// ... (implementation as before, sets isLoading on the specific card) ...
|
||||
// This function should now also ensure the status becomes 'pending'
|
||||
// so the poller can pick it up if needed, or update directly on success.
|
||||
if (!currentProjectId) return;
|
||||
setUrls(prevUrls => prevUrls.map(url =>
|
||||
url.id === urlIdToRegen ? { ...url, isLoading: true, processingStatus: 'pending', summary: 'Regenerating...' } : url // Set status to pending
|
||||
));
|
||||
regenerateSummary(urlIdToRegen)
|
||||
.then(updatedUrlData => {
|
||||
setUrls(prevUrls => prevUrls.map(url => {
|
||||
if (url.id === urlIdToRegen) {
|
||||
// Merge result, ensure isLoading is false
|
||||
// API returns status 'pending' if queued, or full data on sync completion
|
||||
if (updatedUrlData.status === 'pending') {
|
||||
return { ...url, isLoading: false, processingStatus: 'pending', summary: 'Regeneration queued...' };
|
||||
} else {
|
||||
// Assume completion if status isn't pending
|
||||
return { ...updatedUrlData, id: urlIdToRegen, isLoading: false }; // Ensure ID is correct
|
||||
}
|
||||
}
|
||||
return url;
|
||||
}));
|
||||
})
|
||||
.catch(err => {
|
||||
console.error("Failed to regenerate summary:", err);
|
||||
setUrls(prevUrls => prevUrls.map(url =>
|
||||
// Set status back? Or maybe to failed? Let's mark failed.
|
||||
url.id === urlIdToRegen ? { ...url, isLoading: false, processingStatus: 'failed', summary: 'Regeneration failed.' } : url
|
||||
));
|
||||
alert(`Regeneration failed: ${err.message}`);
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
// --- Render Logic ---
|
||||
if (isLoading) return <div className={styles.loading}>Loading project data...</div>;
|
||||
if (!currentProjectId && !isLoading) return <div className={styles.noProjectSelected}>Select a project from the sidebar to view details.</div>;
|
||||
if (error && !detailUrlId) return <div className={styles.error}>{error}</div>;
|
||||
if (!projectDetails && !isLoading && !error && currentProjectId && !detailUrlId) return <div className={styles.error}>Could not load details for the selected project.</div>;
|
||||
|
||||
return (
|
||||
<div className={styles.mainContent} key={currentProjectId}>
|
||||
{detailUrlId ? (
|
||||
<UrlDetailPage urlId={detailUrlId} onBack={handleBackToList} />
|
||||
) : (
|
||||
<>
|
||||
{projectDetails && ( <ProjectHeader /* ...props... */ /> )}
|
||||
<UrlCardList
|
||||
urls={urls}
|
||||
onOrderChange={handleUrlOrderChange}
|
||||
onDelete={handleDeleteUrl}
|
||||
onRegenerate={handleRegenerateSummary}
|
||||
onViewDetails={handleViewUrlDetails}
|
||||
/>
|
||||
<div className={styles.actionBar}>
|
||||
{/* ... action buttons ... */}
|
||||
<button className={styles.actionButton} onClick={handleAskAi} disabled={!currentProjectId || isLoading}>
|
||||
<FaMagic className={styles.actionIcon} /> Ask AI
|
||||
</button>
|
||||
<button className={styles.actionButton} onClick={handleAddUrl} disabled={!currentProjectId || isLoading}>
|
||||
Add url <FaPlus className={styles.actionIcon} />
|
||||
</button>
|
||||
</div>
|
||||
{aiResponse && <div className={styles.aiResponseArea}>{aiResponse}</div>}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default MainContent;
|
||||
113
frontend_react/src/components/MainContent/MainContent.module.css
Normal file
113
frontend_react/src/components/MainContent/MainContent.module.css
Normal file
@ -0,0 +1,113 @@
|
||||
/* components/MainContent/MainContent.module.css */

/* Scrollable column that fills the space next to the sidebar. */
.mainContent {
  flex: 1 1 auto;
  background-color: transparent;
  display: flex;
  flex-direction: column;
  overflow-y: auto;
  position: relative;
}

/* Centered full-area status messages (spinner text, errors, empty state). */
.loading, .error, .noProjectSelected {
  display: flex;
  justify-content: center;
  align-items: center;
  flex-grow: 1;
  font-size: 1.2em;
  color: var(--text-color-secondary); /* Use variable */
}

.error {
  color: var(--danger-color); /* Use variable */
}

/* Row of action buttons pinned below the card list. */
.actionBar {
  margin-top: var(--spacing-lg);
  padding-top: var(--spacing-lg);
  border-top: 1px solid var(--border-color); /* Use variable */
  display: flex;
  justify-content: flex-end;
  gap: var(--spacing-md);
  padding-bottom: var(--spacing-sm);
}

/* Shared base styles for all action-bar buttons. */
.actionButton {
  border: none;
  padding: 10px 20px;
  border-radius: var(--border-radius-md);
  cursor: pointer;
  font-size: 1em;
  font-weight: 500;
  display: flex;
  align-items: center;
  justify-content: center;
  gap: var(--spacing-sm);
  transition: var(--transition-base);
  box-shadow: var(--shadow-sm);
}

.actionButton:hover {
  box-shadow: var(--shadow-md);
  transform: translateY(-1px);
}
.actionButton:active {
  transform: translateY(0px);
  box-shadow: none;
}
.actionButton:disabled {
  opacity: 0.65;
  cursor: not-allowed;
  box-shadow: none;
  transform: none;
}


/* --- AI Button Styles --- */
/* NOTE(review): styling keyed on :first-child/:last-child is fragile — it
   silently breaks if buttons are added or reordered in the JSX. Dedicated
   class names would be safer. */
/* Ask AI Button (assumed to be the first button in the bar) */
.actionButton:first-child {
  background-color: var(--ai-background); /* Use light background */
  color: var(--ai-text); /* Use AI text color (dark red) */
  border-color: var(--ai-text); /* Use AI text color for border */
}
.actionButton:first-child:hover {
  background-color: var(--ai-background-hover); /* Very subtle red background */
  color: var(--ai-text-hover);
  border-color: var(--ai-text-hover);
}
.actionButton:first-child:active {
  background-color: var(--ai-background-activate); /* Slightly darker subtle red background */
  color: var(--ai-text-activate);
  border-color: var(--ai-text-activate);
}
/* --- End AI Button Styles --- */


/* Add URL Button (assumed to be the last button in the bar) */
.actionButton:last-child {
  background-color: var(--success-color); /* Use variable */
  color: var(--text-color-light); /* Use variable */
}
.actionButton:last-child:hover {
  background-color: #58a85c; /* Slightly darker success */
}
.actionButton:last-child:active {
  background-color: #4a9b4f;
}


.actionIcon {
  line-height: 1;
}

/* Box that shows the AI answer below the action bar; pre-wrap keeps the
   model's line breaks. */
.aiResponseArea {
  margin-top: var(--spacing-md);
  padding: var(--spacing-md);
  background-color: #e9f5ff; /* Keep light blue or use a new variable */
  border: 1px solid #bce8f1;
  border-radius: var(--border-radius-sm);
  color: #31708f;
  white-space: pre-wrap;
  font-size: 0.95em;
}
|
||||
|
||||
@ -0,0 +1,87 @@
|
||||
// --- 需要根据后端 projects.py 提供的 API 进行修改 ---
|
||||
// 1. 确认接收的 props (name, description, topic, summary, keywords) 与 MainContent 传递的一致
|
||||
// 2. 确保 WordCloud 组件能正确处理 keywords: [{word, percentage}]
|
||||
// 3. (可选) 添加编辑项目、重新计算关键词等操作的触发器 (按钮)
|
||||
// ----------------------------------------------------
|
||||
|
||||
import React from 'react';
|
||||
import styles from './ProjectHeader.module.css';
|
||||
|
||||
// Updated WordCloud component to accept keywords prop and map it
|
||||
const WordCloud = ({ keywords }) => {
|
||||
// Map backend keywords { word, percentage } to { text, value } if needed by a library
|
||||
// Or render directly
|
||||
const wordCloudData = keywords?.map(kw => ({ text: kw.word, value: kw.percentage })) || [];
|
||||
|
||||
if (!wordCloudData || wordCloudData.length === 0) {
|
||||
return <div className={styles.wordCloudPlaceholder}>No keyword data available. (Recalculate?)</div>;
|
||||
}
|
||||
|
||||
// Simple display for placeholder - Replace with actual word cloud rendering
|
||||
const maxPercentage = Math.max(...wordCloudData.map(d => d.value), 0) || 100;
|
||||
|
||||
return (
|
||||
<div className={styles.wordCloud}>
|
||||
{wordCloudData.slice(0, 20).map((item, index) => ( // Show top 20 words
|
||||
<span
|
||||
key={index}
|
||||
style={{
|
||||
fontSize: `${10 + (item.value / maxPercentage) * 15}px`, // Example scaling
|
||||
margin: '2px 5px',
|
||||
display: 'inline-block',
|
||||
opacity: 0.6 + (item.value / maxPercentage) * 0.4, // Example opacity scaling
|
||||
color: 'var(--text-color-secondary)' // Use CSS var
|
||||
}}
|
||||
title={`${item.text} (${item.value.toFixed(1)}%)`} // Add tooltip with percentage
|
||||
>
|
||||
{item.text}
|
||||
</span>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
/**
 * ProjectHeader Component
 *
 * Displays the project's name, optional topic line, description, optional
 * AI-generated summary, and a project-wide keyword cloud.
 *
 * Props (all optional; fallbacks are rendered for name/description):
 *   name        — project title.
 *   description — project description text.
 *   topic       — short topic label; row omitted when falsy.
 *   summary     — AI summary text; row omitted when falsy.
 *   keywords    — [{ word, percentage }] list fed to <WordCloud>.
 */
function ProjectHeader({ name, description, topic, summary, keywords }) {
  // TODO: Add handlers for Edit, Recalculate Keywords, Delete Project if buttons are added
  // const handleEditClick = () => { ... };
  // const handleRecalcClick = () => { ... call recalculateProjectKeywords API ... };

  return (
    <div className={styles.projectHeader}>
      {/* Left side: Name and Description */}
      <div className={styles.projectInfo}>
        <h1 className={styles.projectName}>{name || 'Project Name'}</h1>
        {/* Topic row only renders when a topic is set */}
        {topic && <p className={styles.projectTopic}>Topic: {topic}</p>}
        <p className={styles.projectDescription}>{description || 'No description provided.'}</p>
        {/* AI summary row only renders when a summary exists */}
        {summary && <p className={styles.projectSummary}>AI Summary: {summary}</p>}
        {/* TODO: Add Edit button here? */}
      </div>

      {/* Right side: Global Word Cloud */}
      <div className={styles.wordCloudContainer}>
        <h2 className={styles.wordCloudTitle}>Project Keywords</h2>
        <WordCloud keywords={keywords} />
        {/* Optional: Button to trigger recalculation */}
        {/* <button onClick={handleRecalcClick} className={styles.recalcButton}>Recalculate Keywords</button> */}
      </div>
      {/* TODO: Add Delete Project button somewhere? Maybe outside this component */}
    </div>
  );
}
|
||||
|
||||
// Add styles for topic, summary, recalcButton in ProjectHeader.module.css if needed
|
||||
// styles.css:
|
||||
// .projectTopic { font-style: italic; color: var(--secondary-color); margin-bottom: 5px; }
|
||||
// .projectSummary { margin-top: 10px; padding-top: 10px; border-top: 1px dashed var(--border-color); font-size: 0.9em; color: var(--text-color-secondary); }
|
||||
// .recalcButton { margin-top: 10px; font-size: 0.8em; padding: 4px 8px; }
|
||||
|
||||
|
||||
export default ProjectHeader;
|
||||
@ -0,0 +1,96 @@
|
||||
/* components/ProjectHeader/ProjectHeader.module.css */
/* Layout: two-column flex header — project info (left) and keyword cloud (right). */

.projectHeader {
  display: flex;
  justify-content: space-between;
  align-items: flex-start;
  background-color: var(--white-color); /* Use variable */
  padding: var(--spacing-lg); /* Use variable */
  border-radius: var(--border-radius-lg); /* Use variable */
  margin-bottom: var(--spacing-xl); /* Use variable */
  box-shadow: var(--shadow-md); /* Use variable */
  gap: var(--spacing-lg); /* Use variable */
  border: 1px solid var(--border-color); /* Add subtle border */
}

.projectInfo {
  flex: 1;
  min-width: 0; /* allow text to shrink/ellipsize inside a flex row */
}

.projectName {
  margin: 0 0 var(--spacing-sm) 0; /* Use variable */
  font-size: 1.8em;
  font-weight: 600; /* Bolder */
  color: var(--text-color-primary); /* Use variable */
  line-height: 1.2;
}

.projectDescription {
  margin: 0;
  color: var(--text-color-secondary); /* Use variable */
  line-height: 1.6; /* Increase line height */
}

/* Right-hand keyword cloud panel (~35% of the header width). */
.wordCloudContainer {
  flex: 0 0 35%;
  min-width: 250px; /* Increase min-width slightly */
  /* border-left: 1px solid var(--border-color); */ /* Remove border, use spacing */
  /* padding-left: var(--spacing-lg); */ /* Remove padding, rely on gap */
  background-color: var(--light-color); /* Subtle background for contrast */
  padding: var(--spacing-md); /* Add padding inside the container */
  border-radius: var(--border-radius-md); /* Round corners */
}

.wordCloudTitle {
  font-size: 1.0em; /* Smaller title */
  font-weight: 600;
  color: var(--text-color-secondary); /* Use variable */
  margin-top: 0;
  margin-bottom: var(--spacing-md); /* Use variable */
  text-transform: uppercase; /* Uppercase for style */
  letter-spacing: 0.5px;
}

.wordCloud {
  min-height: 100px;
  line-height: 1.9; /* Adjust for better spacing */
  text-align: center;
  /* Add some visual style */
  filter: saturate(1.1); /* Slightly more vibrant colors */
}

.wordCloud span { /* Style individual words */
  cursor: default; /* Indicate non-interactive */
  transition: var(--transition-fast);
  color: var(--secondary-color); /* Base color */
}
/* Optional: Hover effect for words */
/* .wordCloud span:hover {
  color: var(--primary-color);
  transform: scale(1.1);
} */

.wordCloudPlaceholder {
  color: var(--text-color-secondary);
  opacity: 0.7; /* Make placeholder less prominent */
  font-style: italic;
  text-align: center;
  padding-top: 20px;
  font-size: 0.9em;
}

/* Responsive adjustments: stack the two columns below 900px. */
@media (max-width: 900px) {
  .projectHeader {
    flex-direction: column;
    align-items: stretch;
  }
  .wordCloudContainer {
    flex-basis: auto;
    margin-top: var(--spacing-lg);
    /* border-top: 1px solid var(--border-color); */ /* Remove top border */
    /* padding-top: var(--spacing-lg); */
  }
}
164
frontend_react/src/components/UrlCard/UrlCard.jsx
Normal file
164
frontend_react/src/components/UrlCard/UrlCard.jsx
Normal file
@ -0,0 +1,164 @@
|
||||
// frontend/src/components/UrlCard/UrlCard.jsx
|
||||
import React from 'react';
|
||||
import { useSortable } from '@dnd-kit/sortable';
|
||||
import { CSS } from '@dnd-kit/utilities';
|
||||
import styles from './UrlCard.module.css';
|
||||
import { FaEdit, FaTrashAlt, FaBars, FaSyncAlt, FaSpinner, FaExclamationTriangle, FaTag, FaStar, FaRegStar, FaStickyNote } from 'react-icons/fa';
|
||||
|
||||
// Simple Keyword Tag component
|
||||
const KeywordTag = ({ keyword }) => (
|
||||
<span className={styles.keywordTag}>{keyword.word}</span>
|
||||
);
|
||||
|
||||
/**
 * UrlCard Component
 * Displays URL info, handles drag-and-drop, and now triggers onViewDetails on click.
 *
 * Props:
 *   id               – sortable id, also passed to dnd-kit's useSortable
 *   url, title, summary, keywords, favicon, starred, note – display data
 *   processingStatus – 'pending' | 'failed' | anything else treated as completed
 *   isLoading        – true while a per-card API call is in flight (shows overlay)
 *   onDelete, onRegenerate, onViewDetails – callbacks supplied by the parent list
 */
function UrlCard({
  id,
  url,
  title,
  summary,
  keywords,
  processingStatus,
  favicon,
  starred,
  note,
  isLoading,
  onDelete,
  onRegenerate,
  onViewDetails, // Accept the new prop
  // Add handlers for starring/editing notes if implemented
}) {

  const {
    attributes,
    listeners, // listeners for drag handle
    setNodeRef,
    transform,
    transition,
    isDragging
  } = useSortable({ id: id });

  // Inline style driven by drag state; the colored left border doubles as a
  // processing-status indicator (orange = pending, red = failed).
  const style = {
    transform: CSS.Transform.toString(transform),
    transition,
    opacity: isDragging ? 0.8 : 1,
    marginBottom: '15px',
    position: 'relative',
    zIndex: isDragging ? 100 : 'auto',
    borderLeft: processingStatus === 'pending' ? '3px solid orange' : (processingStatus === 'failed' ? '3px solid red' : '3px solid transparent'),
  };

  // All button handlers call stopPropagation so a click on a button does not
  // also fire handleCardClick on the surrounding cardContent div.
  const handleEdit = (e) => {
    e.stopPropagation(); // Prevent card click when clicking button
    console.log("Edit clicked for:", id);
    alert(`Edit Note/Details for URL ID: ${id} (Placeholder)`);
  };

  const handleDelete = (e) => {
    e.stopPropagation(); // Prevent card click when clicking button
    onDelete(); // Call original delete handler
  }

  const handleRegenerate = (e) => {
    e.stopPropagation(); // Prevent card click when clicking button
    onRegenerate(); // Call original regenerate handler
  }

  const handleStarClick = (e) => {
    e.stopPropagation(); // Prevent card click when clicking button
    // TODO: Implement star toggling logic + API call
    console.log("Star clicked for:", id);
    alert(`Toggle star for ${id} (Placeholder)`);
  }

  // Determine content based on processing status:
  // pending/failed show a status line; anything else shows summary + keywords + note.
  let cardBody;
  if (processingStatus === 'pending') {
    cardBody = <div className={styles.statusInfo}><FaSpinner className={styles.spinnerIconSmall} /> Processing...</div>;
  } else if (processingStatus === 'failed') {
    cardBody = <div className={styles.statusInfo}><FaExclamationTriangle className={styles.errorIcon} /> Processing Failed</div>;
  } else { // completed or undefined
    cardBody = (
      <>
        <p className={styles.summary}>{summary || 'No summary available.'}</p>
        {keywords && keywords.length > 0 && (
          <div className={styles.keywordsContainer}>
            <FaTag className={styles.keywordIcon} />
            {/* Show at most 5 keywords; signal truncation with an ellipsis. */}
            {keywords.slice(0, 5).map((kw, index) => <KeywordTag key={index} keyword={kw} />)}
            {keywords.length > 5 && <span className={styles.moreKeywords}>...</span>}
          </div>
        )}
        {note && (
          <div className={styles.noteContainer}>
            <FaStickyNote className={styles.noteIcon} />
            <span className={styles.noteText}>{note}</span>
          </div>
        )}
      </>
    );
  }

  // --- Click handler for the main content area ---
  const handleCardClick = () => {
    // Only navigate if not dragging
    if (!isDragging && onViewDetails) {
      onViewDetails(); // Call the handler passed from MainContent
    }
  }

  return (
    // setNodeRef and attributes for dnd-kit Sortable.
    // NOTE: drag `listeners` are attached only to the handle div below, so the
    // rest of the card stays freely clickable.
    <div ref={setNodeRef} style={style} {...attributes} className={`${styles.card} ${isLoading ? styles.loading : ''}`}>
      {isLoading && (
        <div className={styles.spinnerOverlay}>
          <FaSpinner className={styles.spinnerIcon} />
        </div>
      )}

      {/* Left side: Buttons */}
      <div className={styles.leftColumn}>
        {/* Hide the favicon element entirely if the image fails to load. */}
        {favicon && <img src={favicon} alt="favicon" className={styles.favicon} onError={(e) => e.target.style.display='none'}/>}
        <button onClick={handleStarClick} className={`${styles.iconButton} ${styles.starButton}`} title={starred ? "Unstar" : "Star"}>
          {starred ? <FaStar /> : <FaRegStar />}
        </button>
        <button
          className={styles.iconButton}
          onClick={handleRegenerate} // Use specific handler
          title="Regenerate Summary/Keywords"
          disabled={isLoading || processingStatus === 'pending'}
        >
          <FaSyncAlt />
        </button>
      </div>

      {/* Center: Main Content - Make this part clickable */}
      <div className={styles.cardContent} onClick={handleCardClick} style={{ cursor: 'pointer' }}> {/* Add onClick and pointer */}
        <div className={styles.cardHeader}>
          <span className={styles.cardTitle}>{title || 'No Title'}</span>
          {/* Make URL link not trigger card click? Optional, but often good UX */}
          <a href={url} target="_blank" rel="noopener noreferrer" className={styles.urlLink} onClick={(e) => e.stopPropagation()}>
            {url}
          </a>
          <div className={styles.cardActions}>
            <button className={styles.iconButton} onClick={handleEdit} title="Edit/View Note">
              <FaEdit />
            </button>
            <button className={`${styles.iconButton} ${styles.deleteButton}`} onClick={handleDelete} title="Delete">
              <FaTrashAlt />
            </button>
          </div>
        </div>
        {cardBody}
      </div>

      {/* Right side: Drag Handle - Use listeners from useSortable */}
      <div className={styles.dragHandle} {...listeners} title="Drag to reorder">
        <FaBars />
      </div>
    </div>
  );
}

export default UrlCard;
|
||||
262
frontend_react/src/components/UrlCard/UrlCard.module.css
Normal file
262
frontend_react/src/components/UrlCard/UrlCard.module.css
Normal file
@ -0,0 +1,262 @@
|
||||
/* components/UrlCard/UrlCard.module.css */
/* Card layout: [leftColumn buttons] [cardContent] [dragHandle], flex row. */

.card {
  background-color: #ffffff;
  border-radius: 8px;
  box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
  /* padding: 15px 20px; */ /* Padding moved to inner columns */
  display: flex;
  align-items: stretch; /* Make columns same height */
  /* gap: 15px; */ /* Replaced by padding on columns */
  position: relative; /* For spinner overlay */
  transition: box-shadow 0.2s ease, border-left 0.3s ease; /* Added border transition */
  overflow: hidden; /* Prevent content spillover */
}

.card:hover {
  box-shadow: 0 3px 6px rgba(0, 0, 0, 0.1);
}

.card.loading {
  /* Opacity handled by spinner overlay now */
  pointer-events: none; /* Prevent interaction while API call is loading */
}

/* Full-card translucent overlay shown while isLoading is true. */
.spinnerOverlay {
  position: absolute;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background-color: rgba(255, 255, 255, 0.7);
  display: flex;
  justify-content: center;
  align-items: center;
  z-index: 10;
  border-radius: 8px; /* Match card radius */
}

.spinnerIcon {
  font-size: 1.5em;
  color: #007bff;
  animation: spin 1s linear infinite;
}
.spinnerIconSmall {
  font-size: 1em; /* Smaller spinner for inline status */
  color: #007bff;
  animation: spin 1s linear infinite;
  margin-right: 5px;
}

@keyframes spin {
  0% { transform: rotate(0deg); }
  100% { transform: rotate(360deg); }
}

/* --- Column Structure --- */
.leftColumn {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: flex-start;
  padding: var(--spacing-md) var(--spacing-sm);
  gap: var(--spacing-md);
  border-right: 1px solid var(--border-color); /* Use variable */
  flex-shrink: 0;
}

.cardContent {
  flex-grow: 1; /* Takes up most space */
  min-width: 0; /* Prevent overflow */
  padding: 15px; /* Padding */
}

.dragHandle {
  display: flex;
  align-items: center; /* Center icon vertically */
  justify-content: center;
  cursor: grab;
  color: #adb5bd; /* Light color for handle */
  padding: 15px 10px; /* Padding */
  border-left: 1px solid #eee; /* Separator line */
  flex-shrink: 0; /* Prevent shrinking */
}
/* --- End Column Structure --- */

.cardHeader {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 10px;
  gap: 10px; /* Space between header elements */
  flex-wrap: wrap; /* Allow wrapping on smaller widths within card */
}

.cardTitle { /* New style for title */
  font-weight: 600; /* Make title slightly bolder */
  color: #333;
  margin-right: auto; /* Push URL and actions to the right */
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis;
}

.urlLink {
  color: #007bff;
  text-decoration: none;
  white-space: nowrap;
  overflow: hidden;
  text-overflow: ellipsis; /* Truncate long URLs */
  flex-shrink: 1; /* Allow URL to shrink */
  min-width: 50px; /* Prevent URL from becoming too small */
  margin-left: 10px; /* Space from title */
}

.urlLink:hover {
  text-decoration: underline;
}

.cardActions {
  display: flex;
  align-items: center;
  gap: 8px; /* Space between action icons */
  flex-shrink: 0; /* Prevent actions from shrinking */
}

.iconButton {
  background: none;
  border: none;
  color: var(--text-color-secondary); /* Use variable */
  cursor: pointer;
  padding: 5px;
  font-size: 0.95em;
  transition: var(--transition-fast); /* Use variable */
  line-height: 1;
  border-radius: 50%;
}
.iconButton:hover {
  color: var(--text-color-primary); /* Use variable */
  background-color: var(--light-color); /* Use variable */
}
.iconButton:disabled {
  color: #adb5bd;
  cursor: not-allowed;
  background-color: transparent !important;
}

/* --- Specific AI Regen Button Style --- */
/* Target the regen button specifically if possible, otherwise rely on its position/icon */
/* Assuming it's the last button in leftColumn for now */
.leftColumn > .iconButton:last-child { /* Example selector */
  color: var(--ai-text); /* Use AI text color */
}
.leftColumn > .iconButton:last-child:hover {
  color: var(--ai-text-hover); /* Use AI hover text color */
  background-color: rgba(155, 53, 53, 0.1); /* Subtle reddish background on hover */
}
.leftColumn > .iconButton:last-child:disabled {
  color: #c7a9a9; /* Muted red when disabled */
  background-color: transparent !important;
}
/* --- End AI Regen Button Style --- */

.deleteButton:hover {
  color: #dc3545; /* Red for delete */
}

.starButton {
  color: #ffc107; /* Yellow for stars */
}
.starButton:hover {
  color: #e0a800;
}

.summary {
  color: #555;
  font-size: 0.95em;
  line-height: 1.5;
  margin: 0 0 10px 0; /* Add bottom margin */
}

.dragHandle:active {
  cursor: grabbing;
}

/* --- New Styles for Status, Keywords, Favicon, Note, Star --- */
.statusInfo {
  display: flex;
  align-items: center;
  justify-content: center;
  padding: 20px;
  font-style: italic;
  color: #666;
  background-color: #f8f9fa;
  border-radius: 4px;
  min-height: 50px; /* Give it some height */
}

.errorIcon {
  color: #dc3545; /* Red for error */
  margin-right: 5px;
}

.keywordsContainer {
  margin-top: 10px;
  display: flex;
  align-items: center;
  flex-wrap: wrap;
  gap: 5px;
}

.keywordIcon {
  color: #6c757d;
  margin-right: 5px;
  font-size: 0.9em;
}

.keywordTag {
  background-color: #e9ecef;
  color: #495057;
  padding: 2px 6px;
  border-radius: 10px; /* Pill shape */
  font-size: 0.8em;
  white-space: nowrap;
}

.moreKeywords {
  font-size: 0.8em;
  color: #6c757d;
}

.favicon {
  width: 16px;
  height: 16px;
  object-fit: contain;
  /* margin-right: 8px; */ /* Spacing handled by leftColumn gap */
}

.noteContainer {
  margin-top: 10px;
  padding-top: 10px;
  border-top: 1px dashed #eee;
  font-size: 0.9em;
  color: #666;
  display: flex;
  align-items: flex-start;
  gap: 5px;
}

.noteIcon {
  color: #6c757d;
  margin-top: 2px; /* Align icon nicely */
  flex-shrink: 0;
}
.noteText {
  white-space: pre-wrap; /* Respect line breaks in notes */
}

/* --- End New Styles --- */
||||
92
frontend_react/src/components/UrlCardList/UrlCardList.jsx
Normal file
92
frontend_react/src/components/UrlCardList/UrlCardList.jsx
Normal file
@ -0,0 +1,92 @@
|
||||
// frontend/src/components/UrlCardList/UrlCardList.jsx
|
||||
import React from 'react';
|
||||
import {
|
||||
DndContext,
|
||||
closestCenter,
|
||||
KeyboardSensor,
|
||||
PointerSensor,
|
||||
useSensor,
|
||||
useSensors,
|
||||
} from '@dnd-kit/core';
|
||||
import {
|
||||
arrayMove,
|
||||
SortableContext,
|
||||
sortableKeyboardCoordinates,
|
||||
verticalListSortingStrategy,
|
||||
} from '@dnd-kit/sortable';
|
||||
|
||||
import UrlCard from '../UrlCard/UrlCard.jsx';
|
||||
import styles from './UrlCardList.module.css';
|
||||
|
||||
/**
|
||||
* UrlCardList Component
|
||||
* Renders the list and handles drag-and-drop.
|
||||
* Now accepts and passes down onViewDetails prop.
|
||||
*/
|
||||
// Accept onViewDetails prop
|
||||
function UrlCardList({ urls, onOrderChange, onDelete, onRegenerate, onViewDetails }) {
|
||||
|
||||
const sensors = useSensors(
|
||||
useSensor(PointerSensor),
|
||||
useSensor(KeyboardSensor, {
|
||||
coordinateGetter: sortableKeyboardCoordinates,
|
||||
})
|
||||
);
|
||||
|
||||
const handleDragEnd = (event) => {
|
||||
const { active, over } = event;
|
||||
if (over && active.id !== over.id) {
|
||||
const oldIndex = urls.findIndex((url) => url.id === active.id);
|
||||
const newIndex = urls.findIndex((url) => url.id === over.id);
|
||||
if (oldIndex === -1 || newIndex === -1) {
|
||||
console.error("Could not find dragged item index");
|
||||
return;
|
||||
}
|
||||
const newOrder = arrayMove(urls, oldIndex, newIndex);
|
||||
onOrderChange(newOrder);
|
||||
}
|
||||
};
|
||||
|
||||
if (!urls || urls.length === 0) {
|
||||
return <div className={styles.emptyList}>No URLs added to this project yet. Start by adding one below!</div>;
|
||||
}
|
||||
|
||||
const urlIds = urls.map(url => url.id);
|
||||
|
||||
return (
|
||||
<DndContext
|
||||
sensors={sensors}
|
||||
collisionDetection={closestCenter}
|
||||
onDragEnd={handleDragEnd}
|
||||
>
|
||||
<SortableContext
|
||||
items={urlIds}
|
||||
strategy={verticalListSortingStrategy}
|
||||
>
|
||||
<div className={styles.urlCardList}>
|
||||
{urls.map((url) => (
|
||||
<UrlCard
|
||||
key={url.id}
|
||||
id={url.id}
|
||||
url={url.url}
|
||||
title={url.title}
|
||||
summary={url.summary}
|
||||
keywords={url.keywords} // Pass keywords
|
||||
processingStatus={url.processingStatus} // Pass status
|
||||
favicon={url.favicon} // Pass favicon
|
||||
starred={url.starred} // Pass starred
|
||||
note={url.note} // Pass note
|
||||
isLoading={url.isLoading}
|
||||
onDelete={() => onDelete(url.id)}
|
||||
onRegenerate={() => onRegenerate(url.id)}
|
||||
onViewDetails={() => onViewDetails(url.id)} // Pass onViewDetails down
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</SortableContext>
|
||||
</DndContext>
|
||||
);
|
||||
}
|
||||
|
||||
export default UrlCardList;
|
||||
|
||||
@ -0,0 +1,16 @@
|
||||
/* components/UrlCardList/UrlCardList.module.css */
.urlCardList {
  /* Container for the list */
  margin-top: 20px; /* Space below header */
  flex-grow: 1; /* Allows list to take available space if MainContent is flex */
}

/* Placeholder shown when the project has no URLs yet. */
.emptyList {
  text-align: center;
  color: #888;
  font-style: italic;
  padding: 40px 20px;
  border: 2px dashed #e0e0e0;
  border-radius: 8px;
  background-color: #fafafa;
}
|
||||
308
frontend_react/src/components/UrlDetailPage/UrlDetailPage.jsx
Normal file
308
frontend_react/src/components/UrlDetailPage/UrlDetailPage.jsx
Normal file
@ -0,0 +1,308 @@
|
||||
import React, { useState, useEffect, useCallback } from 'react';
|
||||
import { fetchUrlDetails, updateUrlDetails } from '../../services/api'; // Import API functions
|
||||
import styles from './UrlDetailPage.module.css'; // We'll create this CSS module next
|
||||
import { FaLink, FaStar, FaRegStar, FaStickyNote, FaTags, FaInfoCircle, FaSpinner, FaExclamationTriangle, FaCalendarAlt, FaSave, FaTimes, FaEdit, FaCheckCircle } from 'react-icons/fa'; // Import icons
|
||||
|
||||
// Helper to format date strings.
// Returns 'N/A' for missing input, a locale-formatted string for a parseable
// ISO 8601 timestamp, and the original string unchanged when parsing fails.
const formatDate = (dateString) => {
  if (!dateString) return 'N/A';
  try {
    // Assuming dateString is ISO 8601 UTC (ends with Z)
    const parsed = new Date(dateString);
    // Bug fix: the Date constructor never throws on bad input — it produces an
    // "Invalid Date" (getTime() is NaN), so the catch below was dead code and
    // the UI would have rendered the literal text "Invalid Date". Fall back to
    // the raw string instead, matching the catch branch's intent.
    if (Number.isNaN(parsed.getTime())) return dateString;
    return parsed.toLocaleString(undefined, {
      year: 'numeric', month: 'short', day: 'numeric',
      hour: '2-digit', minute: '2-digit'
    });
  } catch (e) {
    return dateString; // Return original if formatting fails
  }
};
|
||||
|
||||
// Simple Keyword Tag component (can be shared or kept local)
|
||||
const KeywordTag = ({ keyword }) => (
|
||||
<span className={styles.keywordTag} title={`${keyword.percentage.toFixed(1)}%`}>
|
||||
{keyword.word}
|
||||
</span>
|
||||
);
|
||||
|
||||
/**
 * UrlDetailPage Component
 * Fetches, displays, and allows editing of URL details.
 * Expects `urlId` prop and an `onBack` function prop to navigate back.
 *
 * State machine overview:
 *   - load: isLoading/error/urlData driven by fetchUrlDetails(urlId)
 *   - edit: isEditing gates inputs; edited* fields hold draft values
 *   - save: isSaving disables controls; only changed fields are PATCHed
 *   - star: optimistic toggle with rollback on API failure
 */
function UrlDetailPage({ urlId, onBack }) {
  const [urlData, setUrlData] = useState(null); // Original fetched data
  const [isLoading, setIsLoading] = useState(true);
  const [error, setError] = useState(null);
  const [isSaving, setIsSaving] = useState(false); // State for save operation
  const [saveSuccess, setSaveSuccess] = useState(false); // State for success message

  // --- State for Editing ---
  const [isEditing, setIsEditing] = useState(false);
  const [editedTitle, setEditedTitle] = useState('');
  const [editedSummary, setEditedSummary] = useState('');
  const [editedNote, setEditedNote] = useState('');
  // --- End Editing State ---

  // Fetch data when urlId changes
  const loadUrlData = useCallback(() => {
    if (!urlId) {
      setError("No URL ID provided.");
      setIsLoading(false);
      setUrlData(null);
      return;
    }
    console.log(`UrlDetailPage: Fetching details for URL ID: ${urlId}`);
    setIsLoading(true);
    setError(null);
    setUrlData(null);
    setIsEditing(false); // leave edit mode whenever a (re)load starts

    fetchUrlDetails(urlId)
      .then(data => {
        setUrlData(data);
        // Initialize edit state when data loads (also ensures reset if data reloads)
        setEditedTitle(data?.title || '');
        setEditedSummary(data?.summary || '');
        setEditedNote(data?.note || '');
        setIsLoading(false);
      })
      .catch(err => {
        console.error(`UrlDetailPage: Failed to fetch URL details for ${urlId}:`, err);
        setError(err.message || "Failed to load URL details.");
        setIsLoading(false);
      });
  }, [urlId]);

  useEffect(() => {
    loadUrlData();
  }, [loadUrlData]);

  // --- Edit Mode Handlers ---
  const handleEdit = () => {
    if (!urlData) return;
    // Re-initialize edit fields with current data when entering edit mode
    setEditedTitle(urlData.title || '');
    setEditedSummary(urlData.summary || '');
    setEditedNote(urlData.note || '');
    setIsEditing(true);
    setSaveSuccess(false);
    setError(null);
  };

  const handleCancel = () => {
    setIsEditing(false);
    setError(null);
    // No need to reset fields explicitly, they will be re-initialized
    // from urlData next time edit is clicked.
  };

  const handleSave = async () => {
    if (!urlData) return;
    setIsSaving(true);
    setError(null);
    setSaveSuccess(false);

    // NOTE(review): updateData is never used — the diffed changedData object
    // below is what gets sent. Candidate for removal in a follow-up.
    const updateData = {
      title: editedTitle,
      summary: editedSummary,
      note: editedNote,
    };

    try {
      // Pass only changed data (optional optimization, backend handles it)
      const changedData = {};
      if (editedTitle !== urlData.title) changedData.title = editedTitle;
      if (editedSummary !== urlData.summary) changedData.summary = editedSummary;
      if (editedNote !== urlData.note) changedData.note = editedNote;

      if (Object.keys(changedData).length === 0) {
        console.log("No changes detected, exiting edit mode.");
        setIsEditing(false);
        setIsSaving(false);
        return; // No need to call API if nothing changed
      }

      // NOTE(review): updatedUrl is unused — local state is merged from
      // changedData instead of the API response (see comment below).
      const updatedUrl = await updateUrlDetails(urlId, changedData); // Send only changed data
      // Update local state with the response from the API OR merge changes
      // Merging changes locally might be smoother if API doesn't return full object
      setUrlData(prevData => ({
        ...prevData,
        ...changedData // Apply local changes directly
        // Alternatively, if API returns full updated object: ...updatedUrl
      }));
      setIsEditing(false);
      setSaveSuccess(true);
      // Auto-hide the success banner after 2.5s.
      setTimeout(() => setSaveSuccess(false), 2500);
    } catch (err) {
      console.error("UrlDetailPage: Failed to save URL details:", err);
      setError(err.message || "Failed to save changes.");
    } finally {
      setIsSaving(false);
    }
  };
  // --- End Edit Mode Handlers ---

  // --- Star Toggle Handler ---
  // Optimistic update: flip the star locally first, roll back if the API fails.
  const handleToggleStar = async () => {
    if (!urlData || isSaving || isEditing) return;

    const newStarredStatus = !urlData.starred;
    const originalStatus = urlData.starred;
    setUrlData(prevData => ({ ...prevData, starred: newStarredStatus }));

    try {
      await updateUrlDetails(urlId, { starred: newStarredStatus });
    } catch (err) {
      console.error("UrlDetailPage: Failed to update star status:", err);
      setUrlData(prevData => ({ ...prevData, starred: originalStatus }));
      alert(`Failed to update star status: ${err.message}`);
    }
  };
  // --- End Star Toggle Handler ---

  // --- Render states ---
  if (isLoading) {
    return <div className={styles.statusMessage}><FaSpinner className={styles.spinnerIcon} /> Loading URL Details...</div>;
  }
  if (error && !isEditing) {
    return (
      <div className={`${styles.statusMessage} ${styles.error}`}>
        <FaExclamationTriangle /> {error}
        {onBack && <button onClick={onBack} className={styles.backButton}>Go Back</button>}
      </div>
    );
  }
  if (!urlData && !isLoading) {
    return <div className={styles.statusMessage}>URL data not available.</div>;
  }

  // --- Render URL Details ---
  return (
    <div className={styles.detailPageContainer}>
      {onBack && ( // Always show back button?
        <button onClick={onBack} className={styles.backButton} disabled={isSaving}>
          ← Back to List
        </button>
      )}

      {/* Header: Title, Favicon, Star, Edit/Save/Cancel */}
      <div className={styles.header}>
        {urlData.favicon && <img src={urlData.favicon} alt="favicon" className={styles.favicon} onError={(e) => e.target.style.display='none'}/>}

        {/* Editable Title */}
        {isEditing ? (
          <input
            type="text"
            value={editedTitle}
            // *** ADDED onChange ***
            onChange={(e) => setEditedTitle(e.target.value)}
            className={`${styles.titleInput} ${styles.inputField}`}
            disabled={isSaving}
            aria-label="URL Title"
          />
        ) : (
          <h1 className={styles.title}>{urlData.title || 'No Title'}</h1>
        )}

        <button
          onClick={handleToggleStar}
          className={styles.starButton}
          title={urlData.starred ? "Unstar" : "Star"}
          disabled={isSaving || isEditing} // Disable while saving/editing other fields
        >
          {urlData.starred ? <FaStar /> : <FaRegStar />}
        </button>

        <div className={styles.editControls}>
          {isEditing ? (
            <>
              <button onClick={handleSave} className={styles.saveButton} disabled={isSaving}>
                {isSaving ? <FaSpinner className={styles.spinnerIconSmall}/> : <FaSave />} Save
              </button>
              <button onClick={handleCancel} className={styles.cancelButton} disabled={isSaving}>
                <FaTimes /> Cancel
              </button>
            </>
          ) : (
            <button onClick={handleEdit} className={styles.editButton}>
              <FaEdit /> Edit
            </button>
          )}
        </div>
      </div>

      {/* Display Save Error/Success Messages */}
      {error && isEditing && <p className={`${styles.statusMessage} ${styles.error}`}>{error}</p>}
      {saveSuccess && <p className={`${styles.statusMessage} ${styles.success}`}><FaCheckCircle/> Saved successfully!</p>}


      <a href={urlData.url} target="_blank" rel="noopener noreferrer" className={styles.urlLink}>
        <FaLink /> {urlData.url}
      </a>

      {/* Metadata Section */}
      <div className={styles.metadata}>
        <span className={styles.metadataItem} title="Processing Status">
          <FaInfoCircle /> Status: <span className={`${styles.statusBadge} ${styles[urlData.processingStatus]}`}>{urlData.processingStatus}</span>
        </span>
        <span className={styles.metadataItem} title="Last Updated">
          <FaCalendarAlt /> Updated: {formatDate(urlData.updatedAt)}
        </span>
        <span className={styles.metadataItem} title="Date Added">
          <FaCalendarAlt /> Added: {formatDate(urlData.createdAt)}
        </span>
      </div>

      {/* Summary Section (Editable) */}
      <div className={styles.section}>
        <h2 className={styles.sectionTitle}>Summary</h2>
        {isEditing ? (
          <textarea
            value={editedSummary}
            // *** ADDED onChange ***
            onChange={(e) => setEditedSummary(e.target.value)}
            className={`${styles.summaryTextarea} ${styles.inputField}`}
            rows={5}
            disabled={isSaving}
            aria-label="URL Summary"
          />
        ) : (
          <p className={styles.summaryText}>{urlData.summary || <span className={styles.noNote}>No summary available.</span>}</p>
        )}
      </div>

      {/* Keywords Section */}
      {urlData.keywords && urlData.keywords.length > 0 && (
        <div className={styles.section}>
          <h2 className={styles.sectionTitle}><FaTags /> Keywords</h2>
          <div className={styles.keywordsContainer}>
            {urlData.keywords.map((kw, index) => <KeywordTag key={index} keyword={kw} />)}
          </div>
        </div>
      )}

      {/* Note Section (Editable) */}
      <div className={styles.section}>
        <h2 className={styles.sectionTitle}><FaStickyNote /> Note</h2>
        {isEditing ? (
          <textarea
            value={editedNote}
            // *** ADDED onChange ***
            onChange={(e) => setEditedNote(e.target.value)}
            className={`${styles.noteTextarea} ${styles.inputField}`}
            rows={4}
            disabled={isSaving}
            aria-label="User Note"
          />
        ) : (
          <div className={styles.noteContent}>
            {urlData.note || <span className={styles.noNote}>No note added yet.</span>}
          </div>
        )}
      </div>
    </div>
  );
}

export default UrlDetailPage;
|
||||
@ -0,0 +1,293 @@
|
||||
/* frontend/src/components/UrlDetailPage/UrlDetailPage.module.css */
|
||||
|
||||
.detailPageContainer {
|
||||
padding: var(--spacing-lg);
|
||||
background-color: var(--white-color);
|
||||
border-radius: var(--border-radius-lg);
|
||||
box-shadow: var(--shadow-md);
|
||||
border: 1px solid var(--border-color);
|
||||
margin-top: var(--spacing-lg);
|
||||
}
|
||||
|
||||
.statusMessage {
|
||||
padding: var(--spacing-xl);
|
||||
text-align: center;
|
||||
font-size: 1.1em;
|
||||
color: var(--text-color-secondary);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: var(--spacing-sm);
|
||||
}
|
||||
|
||||
.statusMessage.error {
|
||||
color: var(--danger-color);
|
||||
background-color: rgba(220, 53, 69, 0.05); /* Light red background */
|
||||
border: 1px solid rgba(220, 53, 69, 0.2);
|
||||
border-radius: var(--border-radius-sm);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
margin-top: var(--spacing-sm);
|
||||
font-size: 0.95em;
|
||||
}
|
||||
.statusMessage.success {
|
||||
color: var(--success-color); /* Use variable */
|
||||
background-color: rgba(40, 167, 69, 0.05); /* Light green background */
|
||||
border: 1px solid rgba(40, 167, 69, 0.2);
|
||||
border-radius: var(--border-radius-sm);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
margin-top: var(--spacing-sm);
|
||||
font-size: 0.95em;
|
||||
}
|
||||
|
||||
|
||||
.spinnerIcon {
|
||||
animation: spin 1s linear infinite;
|
||||
}
|
||||
.spinnerIconSmall {
|
||||
display: inline-block; /* Allow margin */
|
||||
animation: spin 1s linear infinite;
|
||||
margin-right: 5px; /* Space after spinner */
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
0% { transform: rotate(0deg); }
|
||||
100% { transform: rotate(360deg); }
|
||||
}
|
||||
|
||||
.backButton {
|
||||
background: none;
|
||||
border: 1px solid var(--border-color);
|
||||
padding: var(--spacing-xs) var(--spacing-md);
|
||||
border-radius: var(--border-radius-md);
|
||||
cursor: pointer;
|
||||
color: var(--text-color-secondary);
|
||||
margin-bottom: var(--spacing-lg);
|
||||
transition: var(--transition-base);
|
||||
font-size: 0.9em;
|
||||
}
|
||||
.backButton:hover {
|
||||
background-color: var(--light-color);
|
||||
border-color: #bbb;
|
||||
color: var(--text-color-primary);
|
||||
}
|
||||
.backButton:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-md);
|
||||
margin-bottom: var(--spacing-xs);
|
||||
border-bottom: 1px solid var(--border-color);
|
||||
padding-bottom: var(--spacing-md);
|
||||
}
|
||||
|
||||
.favicon {
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
object-fit: contain;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.title {
|
||||
font-size: 1.8em;
|
||||
font-weight: 600;
|
||||
color: var(--text-color-primary);
|
||||
margin: 0;
|
||||
flex-grow: 1; /* Allow title to take space */
|
||||
line-height: 1.2;
|
||||
/* Ensure it doesn't push buttons too far in edit mode */
|
||||
min-width: 50px;
|
||||
}
|
||||
|
||||
.starButton {
|
||||
background: none;
|
||||
border: none;
|
||||
color: #ffc107; /* Yellow for stars */
|
||||
cursor: pointer;
|
||||
padding: 5px;
|
||||
font-size: 1.3em; /* Make star slightly larger */
|
||||
transition: var(--transition-fast);
|
||||
line-height: 1;
|
||||
flex-shrink: 0; /* Prevent shrinking */
|
||||
}
|
||||
.starButton:hover {
|
||||
color: #e0a800;
|
||||
transform: scale(1.1);
|
||||
}
|
||||
.starButton:disabled {
|
||||
color: #ccc; /* Muted color when disabled */
|
||||
cursor: not-allowed;
|
||||
transform: none;
|
||||
}
|
||||
|
||||
|
||||
.urlLink {
|
||||
display: inline-block;
|
||||
color: var(--primary-color);
|
||||
text-decoration: none;
|
||||
word-break: break-all;
|
||||
margin-bottom: var(--spacing-md);
|
||||
font-size: 0.95em;
|
||||
}
|
||||
.urlLink svg {
|
||||
margin-right: var(--spacing-xs);
|
||||
vertical-align: middle;
|
||||
}
|
||||
.urlLink:hover {
|
||||
text-decoration: underline;
|
||||
color: var(--primary-hover-color);
|
||||
}
|
||||
|
||||
.metadata {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: var(--spacing-lg);
|
||||
font-size: 0.85em;
|
||||
color: var(--text-color-secondary);
|
||||
margin-bottom: var(--spacing-lg);
|
||||
padding-bottom: var(--spacing-md);
|
||||
border-bottom: 1px dashed var(--border-color);
|
||||
}
|
||||
.metadataItem { display: flex; align-items: center; gap: var(--spacing-xs); }
|
||||
.metadataItem svg { font-size: 1.1em; }
|
||||
.statusBadge { display: inline-block; padding: 2px 8px; border-radius: 12px; font-weight: 500; font-size: 0.9em; text-transform: capitalize; }
|
||||
.statusBadge.completed { background-color: #d4edda; color: #155724; }
|
||||
.statusBadge.pending { background-color: #fff3cd; color: #856404; }
|
||||
.statusBadge.failed { background-color: #f8d7da; color: #721c24; }
|
||||
|
||||
.section { margin-bottom: var(--spacing-lg); }
|
||||
.sectionTitle { font-size: 1.1em; font-weight: 600; color: var(--text-color-primary); margin-top: 0; margin-bottom: var(--spacing-sm); display: flex; align-items: center; gap: var(--spacing-sm); }
|
||||
.sectionTitle svg { color: var(--text-color-secondary); }
|
||||
|
||||
.summaryText,
|
||||
.noteContent {
|
||||
font-size: 0.95em;
|
||||
line-height: 1.6;
|
||||
color: var(--text-color-primary);
|
||||
white-space: pre-wrap;
|
||||
background-color: var(--light-color);
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
border-radius: var(--border-radius-sm);
|
||||
border: 1px solid var(--border-color);
|
||||
min-height: 40px; /* Ensure some height even if empty */
|
||||
}
|
||||
.noNote { font-style: italic; color: var(--text-color-secondary); }
|
||||
|
||||
.keywordsContainer { display: flex; flex-wrap: wrap; gap: var(--spacing-sm); }
|
||||
.keywordTag { background-color: #e9ecef; color: #495057; padding: 3px 10px; border-radius: 15px; font-size: 0.85em; white-space: nowrap; cursor: default; transition: var(--transition-fast); }
|
||||
.keywordTag:hover { background-color: #dee2e6; }
|
||||
|
||||
/* --- Edit Mode Styles --- */
|
||||
.editControls {
|
||||
display: flex;
|
||||
gap: var(--spacing-sm);
|
||||
flex-shrink: 0; /* Prevent shrinking */
|
||||
}
|
||||
|
||||
.inputField { /* Common styles for edit inputs */
|
||||
width: 100%;
|
||||
padding: var(--spacing-sm) var(--spacing-md);
|
||||
border: 1px solid var(--border-color);
|
||||
border-radius: var(--border-radius-sm);
|
||||
font-size: 1em; /* Match display font size */
|
||||
font-family: inherit;
|
||||
background-color: #fff; /* Explicit white background */
|
||||
transition: var(--transition-base);
|
||||
box-sizing: border-box; /* Include padding/border in width */
|
||||
color: var(--text-color-primary); /* <<< FIX: Explicitly set text color */
|
||||
}
|
||||
.inputField:focus {
|
||||
outline: none;
|
||||
border-color: var(--primary-color);
|
||||
box-shadow: 0 0 0 2px rgba(178, 227, 182, 0.25); /* Adjusted focus color to match primary */
|
||||
}
|
||||
.inputField:disabled {
|
||||
background-color: #e9ecef;
|
||||
cursor: not-allowed;
|
||||
color: var(--text-color-secondary); /* Dim text color when disabled */
|
||||
}
|
||||
|
||||
|
||||
.titleInput {
|
||||
font-size: 1.8em; /* Match h1 */
|
||||
font-weight: 600; /* Match h1 */
|
||||
line-height: 1.2; /* Match h1 */
|
||||
flex-grow: 1; /* Allow input to grow */
|
||||
min-width: 100px; /* Prevent becoming too small */
|
||||
/* Inherits .inputField styles including color */
|
||||
}
|
||||
|
||||
.summaryTextarea,
|
||||
.noteTextarea {
|
||||
font-size: 0.95em; /* Match p */
|
||||
line-height: 1.6; /* Match p */
|
||||
resize: vertical; /* Allow vertical resize */
|
||||
min-height: 80px;
|
||||
/* Inherits .inputField styles including color */
|
||||
}
|
||||
|
||||
/* Edit/Save/Cancel Button Styles */
|
||||
.editButton, .saveButton, .cancelButton {
|
||||
padding: 6px 12px;
|
||||
border-radius: var(--border-radius-md);
|
||||
cursor: pointer;
|
||||
border: 1px solid transparent;
|
||||
font-size: 0.9em;
|
||||
font-weight: 500;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--spacing-xs);
|
||||
transition: var(--transition-base);
|
||||
}
|
||||
.editButton svg, .saveButton svg, .cancelButton svg {
|
||||
font-size: 1.1em;
|
||||
}
|
||||
|
||||
.editButton {
|
||||
background-color: var(--secondary-color);
|
||||
color: var(--text-color-light);
|
||||
border-color: var(--secondary-color);
|
||||
}
|
||||
.editButton:hover {
|
||||
background-color: var(--secondary-hover-color);
|
||||
border-color: var(--secondary-hover-color);
|
||||
}
|
||||
|
||||
.saveButton {
|
||||
background-color: var(--success-color);
|
||||
color: #fff;
|
||||
border-color: var(--success-color);
|
||||
}
|
||||
.saveButton:hover {
|
||||
background-color: #58a85c; /* Darker success */
|
||||
border-color: #58a85c;
|
||||
}
|
||||
.saveButton:disabled {
|
||||
background-color: #a3d9a5;
|
||||
border-color: #a3d9a5;
|
||||
cursor: not-allowed;
|
||||
color: #f0f0f0;
|
||||
}
|
||||
|
||||
|
||||
.cancelButton {
|
||||
background-color: transparent;
|
||||
color: var(--text-color-secondary);
|
||||
border-color: var(--border-color);
|
||||
}
|
||||
.cancelButton:hover {
|
||||
background-color: var(--light-color);
|
||||
color: var(--text-color-primary);
|
||||
}
|
||||
.cancelButton:disabled {
|
||||
color: #ccc;
|
||||
cursor: not-allowed;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
/* --- End Edit Mode Styles --- */
|
||||
|
||||
1
frontend_react/src/components/react.svg
Normal file
1
frontend_react/src/components/react.svg
Normal file
@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
82
frontend_react/src/index.css
Normal file
82
frontend_react/src/index.css
Normal file
@ -0,0 +1,82 @@
|
||||
/* ./index.css */
|
||||
|
||||
:root {
|
||||
font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
|
||||
line-height: 1.5;
|
||||
font-weight: 400;
|
||||
|
||||
color-scheme: light dark;
|
||||
color: rgba(255, 255, 255, 0.87);
|
||||
background-color: #242424;
|
||||
|
||||
font-synthesis: none;
|
||||
text-rendering: optimizeLegibility;
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
a {
|
||||
font-weight: 500;
|
||||
color: #646cff;
|
||||
text-decoration: inherit;
|
||||
}
|
||||
a:hover {
|
||||
color: #535bf2;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0; /* Keep margin reset */
|
||||
/* display: flex; */ /* REMOVED or COMMENTED OUT */
|
||||
/* place-items: center; */ /* REMOVED or COMMENTED OUT */
|
||||
min-width: 320px; /* Keep min-width if needed */
|
||||
min-height: 100vh; /* Keep min-height */
|
||||
/* You might want body to have the background color instead of :root for dark mode */
|
||||
/* background-color: #242424; */ /* Moved from :root potentially */
|
||||
}
|
||||
|
||||
/* Optional: Ensure #root takes full height */
|
||||
#root {
|
||||
min-height: 100vh;
|
||||
/* display: flex; */ /* Ensure #root doesn't interfere either, usually not needed */
|
||||
/* flex-direction: column; */
|
||||
}
|
||||
|
||||
|
||||
h1 {
|
||||
font-size: 3.2em;
|
||||
line-height: 1.1;
|
||||
}
|
||||
|
||||
button {
|
||||
border-radius: 8px;
|
||||
border: 1px solid transparent;
|
||||
padding: 0.6em 1.2em;
|
||||
font-size: 1em;
|
||||
font-weight: 500;
|
||||
font-family: inherit;
|
||||
background-color: #1a1a1a;
|
||||
cursor: pointer;
|
||||
transition: border-color 0.25s;
|
||||
}
|
||||
button:hover {
|
||||
border-color: #646cff;
|
||||
}
|
||||
button:focus,
|
||||
button:focus-visible {
|
||||
outline: 4px auto -webkit-focus-ring-color;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
color: #213547;
|
||||
background-color: #ffffff;
|
||||
}
|
||||
a:hover {
|
||||
color: #747bff;
|
||||
}
|
||||
button {
|
||||
background-color: #f9f9f9;
|
||||
}
|
||||
/* Optionally set light mode body background here if moved from :root */
|
||||
/* body { background-color: #ffffff; } */
|
||||
}
|
||||
10
frontend_react/src/main.jsx
Normal file
10
frontend_react/src/main.jsx
Normal file
@ -0,0 +1,10 @@
|
||||
// Application entry point: mounts the root React component into the #root element.
import React from 'react'
import ReactDOM from 'react-dom/client'
import App from './App.jsx' // root component of the app
import './index.css' // global stylesheet (or any other global CSS file)

// StrictMode enables extra development-time checks (double-invoked renders/effects).
ReactDOM.createRoot(document.getElementById('root')).render(
  <React.StrictMode>
    <App />
  </React.StrictMode>,
)
|
||||
0
frontend_react/src/services/.Rhistory
Normal file
0
frontend_react/src/services/.Rhistory
Normal file
603
frontend_react/src/services/api.js
Normal file
603
frontend_react/src/services/api.js
Normal file
@ -0,0 +1,603 @@
|
||||
// frontend/src/services/api.js

// Base URL for your Flask API - IMPORTANT: Adjust if your backend runs elsewhere
const API_BASE_URL = 'http://localhost:5000'; // Assuming Flask runs on port 5000
||||
|
||||
// --- Helper: read the stored bearer token (null when not logged in) ---
export const getAuthToken = () => localStorage.getItem('authToken');
|
||||
|
||||
// --- Helper: clear credentials and force a fresh login after a 401 ---
function handleUnauthorized() {
  console.warn("API: Received 401 Unauthorized. Logging out and reloading.");
  // Drop the stale/invalid token so the login screen is shown after reload.
  localStorage.removeItem('authToken');
  // Reload the page to force re-authentication.
  window.location.reload();
  // Abort the current promise chain; callers filter on this exact message.
  throw new Error("Authentication failed. Please log in again.");
}
|
||||
|
||||
// --- Login Function ---
/**
 * Authenticates against POST /api/login (endpoint from auth.py).
 * Resolves to { message, token, user_id }; throws when the request fails
 * or the server omits the token.
 */
export const loginUser = async (username, password) => {
  console.log('API: Attempting login with username...');
  try {
    const response = await fetch(`${API_BASE_URL}/api/login`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ username: username, password: password }),
    });
    // FIX: guard the JSON parse so a non-JSON error body (e.g. an HTML 500
    // page) surfaces the HTTP status instead of an opaque SyntaxError —
    // consistent with the `.catch(() => ({}))` pattern used by every other
    // helper in this module.
    const data = await response.json().catch(() => ({}));
    if (!response.ok) throw new Error(data.message || `HTTP error! status: ${response.status}`);
    if (data.token) {
      console.log('API: Login successful, token received.');
      return data; // Contains { message, token, user_id }
    }
    throw new Error('Login successful but no token received from server.');
  } catch (error) {
    console.error("API Error during login:", error);
    throw error; // Re-throw so the login form can display the message.
  }
};
|
||||
|
||||
// --- Project API Functions ---
|
||||
|
||||
/**
 * Fetches the list of projects (summary view).
 * Corresponds to GET /api/projects; resolves to an array of project summaries,
 * each normalized to carry an `id` field.
 */
export const fetchProjects = async () => {
  console.log('API: Fetching projects...');
  const token = getAuthToken();
  // No token at all → log out immediately (throws).
  if (!token) {
    handleUnauthorized();
  }

  try {
    const response = await fetch(`${API_BASE_URL}/api/projects`, {
      headers: {
        'Authorization': `Bearer ${token}`,
        'Content-Type': 'application/json',
      },
    });
    if (!response.ok) {
      // Expired/invalid token → log out and reload.
      if (response.status === 401) handleUnauthorized();
      // Any other failure: surface the server message when parseable.
      const errorData = await response.json().catch(() => ({ message: 'Failed to parse error response' }));
      throw new Error(errorData.message || `HTTP error! status: ${response.status}`);
    }
    const payload = await response.json();
    // Backend returns { projects: [...] }; tolerate the various id key names.
    const projects = payload.projects || [];
    return projects.map((p) => ({ ...p, id: p.project_id || p.id || p._id }));
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error("API Error fetching projects:", error);
    }
    throw error; // Re-throw so components may handle it.
  }
};
|
||||
|
||||
/**
 * Fetches detailed information for a specific project.
 * Corresponds to GET /api/projects/<project_id>; resolves to the project
 * document with a normalized string `id`.
 */
export const fetchProjectDetails = async (projectId) => {
  console.log(`API: Fetching details for project ${projectId}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // throws

  try {
    const requestHeaders = {
      'Authorization': `Bearer ${token}`,
      'Content-Type': 'application/json',
    };
    const response = await fetch(`${API_BASE_URL}/api/projects/${projectId}`, { headers: requestHeaders });
    if (!response.ok) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      if (response.status === 404) throw new Error(`Project with ID ${projectId} not found.`);
      const errorData = await response.json().catch(() => ({}));
      throw new Error(`HTTP error! status: ${response.status} - ${errorData.message || 'Failed to fetch details'}`);
    }
    const details = await response.json();
    // Normalize the identifier: expose a string `id` whether the backend sent `_id` or `id`.
    if (details._id && !details.id) {
      details.id = String(details._id);
    } else if (details.id) {
      details.id = String(details.id);
    }
    return details;
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error fetching details for project ${projectId}:`, error);
    }
    throw error;
  }
};
|
||||
|
||||
/**
 * Fetches URLs for a specific project.
 * Corresponds to GET /api/projects/<project_id>/urls; resolves to an array
 * of URL records, each normalized to carry a string `id`.
 */
export const fetchProjectUrls = async (projectId) => {
  console.log(`API: Fetching URLs for project ${projectId}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // throws
  try {
    const response = await fetch(`${API_BASE_URL}/api/projects/${projectId}/urls`, {
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
    });
    if (!response.ok) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      const errorData = await response.json().catch(() => ({}));
      throw new Error(`HTTP error! status: ${response.status} - ${errorData.message || 'Failed to fetch URLs'}`);
    }
    const payload = await response.json();
    // Backend returns { urls: [...] }; map `_id` to a string `id` for the frontend.
    return (payload.urls || []).map((u) => ({ ...u, id: String(u._id || u.id) }));
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error fetching URLs for project ${projectId}:`, error);
    }
    throw error;
  }
};
|
||||
|
||||
// --- Functions below also need the 401 check ---
|
||||
// --- 下面的函数同样需要 401 检查 ---
|
||||
|
||||
/**
 * Adds a URL to a specific project.
 * Corresponds to POST /api/projects/<project_id>/urls; resolves to the
 * backend payload ({ message, url_id }) with an `id` alias added.
 */
export const addUrlToProject = async (projectId, url) => {
  console.log(`API: Adding URL ${url} to project ${projectId}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // throws
  try {
    const response = await fetch(`${API_BASE_URL}/api/projects/${projectId}/urls`, {
      method: 'POST',
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
      body: JSON.stringify({ url }),
    });
    if (!response.ok) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      const errorData = await response.json().catch(() => ({}));
      throw new Error(`HTTP error! status: ${response.status} - ${errorData.message || 'Unknown error adding URL'}`);
    }
    const addedUrlData = await response.json(); // { message, url_id }
    // Expose a string `id` alias alongside the backend's `url_id`.
    if (addedUrlData.url_id) addedUrlData.id = String(addedUrlData.url_id);
    return addedUrlData;
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error("API Error adding URL:", error);
    }
    throw error;
  }
};
|
||||
|
||||
/**
 * Fetches detailed information for a single URL record.
 * Corresponds to GET /api/urls/<url_id>; resolves to the URL document with
 * a normalized string `id`.
 */
export const fetchUrlDetails = async (urlId) => {
  console.log(`API: Fetching details for URL ${urlId}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // missing token → log out (throws)

  try {
    const requestHeaders = {
      'Authorization': `Bearer ${token}`,
      'Content-Type': 'application/json',
    };
    const response = await fetch(`${API_BASE_URL}/api/urls/${urlId}`, { headers: requestHeaders });

    if (!response.ok) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      // URL not found gets a dedicated, user-readable message.
      if (response.status === 404) {
        throw new Error(`URL with ID ${urlId} not found.`);
      }
      const errorData = await response.json().catch(() => ({}));
      throw new Error(errorData.message || `HTTP error! status: ${response.status}`);
    }

    const details = await response.json();
    // Normalize the identifier: always expose a string `id`.
    if (details._id && !details.id) {
      details.id = String(details._id);
    } else if (details.id) {
      details.id = String(details.id);
    }
    console.log("API: Fetched URL details:", details);
    return details;
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error fetching details for URL ${urlId}:`, error);
    }
    throw error; // Re-throw for the component to handle.
  }
};
|
||||
|
||||
/**
 * Deletes a URL record.
 * Corresponds to DELETE /api/urls/<url_id> (double-check routes/urls.py).
 * Resolves to { success: true } on 200 OK or 204 No Content.
 */
export const deleteUrlFromProject = async (urlIdToDelete) => {
  console.log(`API: Deleting URL ${urlIdToDelete}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // throws
  try {
    const response = await fetch(`${API_BASE_URL}/api/urls/${urlIdToDelete}`, {
      method: 'DELETE',
      headers: { 'Authorization': `Bearer ${token}` },
    });
    // 200 OK or 204 No Content both count as a successful delete.
    const deleted = response.ok || response.status === 204;
    if (!deleted) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      const errorData = await response.json().catch(() => ({}));
      throw new Error(`HTTP error! status: ${response.status} - ${errorData.message || 'Delete URL failed'}`);
    }
    console.log(`Deletion successful for ${urlIdToDelete}`);
    return { success: true };
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error deleting URL ${urlIdToDelete}:`, error);
    }
    throw error;
  }
};
|
||||
|
||||
/**
 * Triggers regeneration/reprocessing of a URL's summary.
 * Corresponds to PUT /api/urls/<url_id>/summarize (see routes/urls.py).
 * Resolves to either the updated URL document (synchronous backend) or
 * { message, status: 'pending' } when the backend queued the task (202).
 */
export const regenerateSummary = async (urlIdToRegen) => {
  console.log(`API: Regenerating summary for URL ${urlIdToRegen}...`);
  const token = getAuthToken();
  if (!token) {
    handleUnauthorized(); // throws
  }
  try {
    const response = await fetch(`${API_BASE_URL}/api/urls/${urlIdToRegen}/summarize`, {
      method: 'PUT', // or POST, depending on the backend route
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
    });
    // FIX: 202 Accepted is a *success* status — response.ok is true for all
    // 200-299 — so the old 202 check inside the `!response.ok` branch was
    // unreachable and a queued task fell through to response.json() as if it
    // were the finished document. Handle "task queued" first.
    if (response.status === 202) {
      console.log(`API: Regeneration task queued for ${urlIdToRegen}`);
      return { message: "Regeneration task queued.", status: 'pending' };
    }
    if (!response.ok) {
      if (response.status === 401) {
        handleUnauthorized(); // expired/invalid token
      }
      const errorData = await response.json().catch(() => ({}));
      throw new Error(`HTTP error! status: ${response.status} - ${errorData.message || 'Regeneration failed'}`);
    }
    // Synchronous backend: parse and return the updated document.
    const updatedUrlData = await response.json();
    if (updatedUrlData._id) updatedUrlData.id = String(updatedUrlData._id); // Ensure id is string
    return updatedUrlData;
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error regenerating summary for ${urlIdToRegen}:`, error);
    }
    throw error;
  }
};
|
||||
|
||||
|
||||
// --- Project Modification API Functions (Add 401 checks similarly) ---
|
||||
// --- 项目修改 API 函数(类似地添加 401 检查)---
|
||||
|
||||
/**
 * Creates a new project.
 * Corresponds to POST /api/projects; resolves to { message, project_id, passkey }
 * with a string `id` alias added.
 */
export const createProject = async (projectData) => {
  console.log('API: Creating new project...', projectData);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // throws
  try {
    const response = await fetch(`${API_BASE_URL}/api/projects`, {
      method: 'POST',
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
      body: JSON.stringify(projectData),
    });
    if (!response.ok) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      const data = await response.json().catch(() => ({}));
      throw new Error(data.message || `HTTP error! status: ${response.status}`);
    }
    const result = await response.json(); // { message, project_id, passkey }
    // Keep the frontend convention of a string `id` alongside `project_id`.
    if (result.project_id) result.id = String(result.project_id);
    return result;
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error("API Error creating project:", error);
    }
    throw error;
  }
};
|
||||
|
||||
/**
 * Updates an existing project.
 * Corresponds to PUT /api/projects/<project_id>; resolves to { message, project? }
 * with the embedded project's `_id` normalized to a string `id`.
 */
export const updateProject = async (projectId, updateData) => {
  console.log(`API: Updating project ${projectId}...`, updateData);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // throws
  try {
    const response = await fetch(`${API_BASE_URL}/api/projects/${projectId}`, {
      method: 'PUT',
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
      body: JSON.stringify(updateData),
    });
    if (!response.ok) {
      if (response.status === 401) handleUnauthorized(); // expired/invalid token
      const data = await response.json().catch(() => ({}));
      if (response.status === 403) throw new Error("Permission denied to update project.");
      if (response.status === 404) throw new Error("Project not found for update.");
      throw new Error(data.message || `HTTP error! status: ${response.status}`);
    }
    const result = await response.json(); // { message, project? }
    // Mirror the `_id` -> `id` normalization used elsewhere in this module.
    if (result.project && result.project._id && !result.project.id) {
      result.project.id = String(result.project._id);
    }
    return result;
  } catch (error) {
    // Skip logging the sentinel error produced by handleUnauthorized().
    if (error.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error updating project ${projectId}:`, error);
    }
    throw error;
  }
};
|
||||
|
||||
// Delete a project via DELETE /api/projects/:id.
// A 204 response has no body, so a synthetic success message is returned;
// 403/404 are mapped to specific errors.
export const deleteProject = async (projectId) => {
  console.log(`API: Deleting project ${projectId}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized();
  try {
    const res = await fetch(`${API_BASE_URL}/api/projects/${projectId}`, {
      method: 'DELETE',
      headers: { 'Authorization': `Bearer ${token}` },
    });
    // 204 No Content: nothing to parse, report success directly.
    if (res.status === 204) return { message: "Project deleted successfully." };
    if (!res.ok) {
      if (res.status === 401) handleUnauthorized();
      const body = await res.json().catch(() => ({}));
      if (res.status === 403) throw new Error("Permission denied to delete project.");
      if (res.status === 404) throw new Error("Project not found for deletion.");
      throw new Error(body.message || `HTTP error! status: ${res.status}`);
    }
    return await res.json(); // { message } when the backend answers 200
  } catch (err) {
    if (err.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error deleting project ${projectId}:`, err);
    }
    throw err;
  }
};
|
||||
|
||||
// Ask the backend to recompute a project's keyword cloud.
// NOTE(review): confirm the backend route is PUT /api/projects/<id>/recalc_keywords.
export const recalculateProjectKeywords = async (projectId) => {
  console.log(`API: Recalculating keywords for project ${projectId}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized();
  try {
    const res = await fetch(`${API_BASE_URL}/api/projects/${projectId}/recalc_keywords`, {
      method: 'PUT', // or POST, depending on the backend
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
      // body: JSON.stringify({}) // optional body if ever required
    });
    if (!res.ok) {
      if (res.status === 401) handleUnauthorized();
      const body = await res.json().catch(() => ({}));
      if (res.status === 403) throw new Error("Permission denied.");
      if (res.status === 404) throw new Error("Project not found.");
      throw new Error(body.message || `HTTP error! status: ${res.status}`);
    }
    return await res.json(); // { message, keywords }
  } catch (err) {
    if (err.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error recalculating keywords for ${projectId}:`, err);
    }
    throw err;
  }
};
|
||||
|
||||
|
||||
// --- API Key Management Functions (Add 401 checks similarly) ---
|
||||
// --- API 密钥管理函数(类似地添加 401 检查)---
|
||||
// Store an AI-provider API key via POST /api/api_list.
// NOTE(review): confirm the backend route is /api/api_list.
export const addApiKey = async (provider, key) => {
  console.log(`API: Storing API key for provider ${provider}...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized();
  try {
    const res = await fetch(`${API_BASE_URL}/api/api_list`, {
      method: 'POST',
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
      body: JSON.stringify({ name: provider, key, selected: true }), // Example body
    });
    if (!res.ok) {
      if (res.status === 401) handleUnauthorized();
      const body = await res.json().catch(() => ({}));
      throw new Error(body.message || `HTTP error! status: ${res.status}`);
    }
    return await res.json();
  } catch (err) {
    if (err.message !== "Authentication failed. Please log in again.") {
      console.error("API Error adding API key:", err);
    }
    throw err;
  }
};
|
||||
|
||||
// Fetch the user's stored API keys from GET /api/api_list and
// normalize each entry's Mongo `_id` into a string `id`.
export const getApiKeys = async () => {
  console.log('API: Fetching stored API keys...');
  const token = getAuthToken();
  if (!token) handleUnauthorized();
  try {
    const res = await fetch(`${API_BASE_URL}/api/api_list`, {
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
    });
    if (!res.ok) {
      if (res.status === 401) handleUnauthorized();
      const body = await res.json().catch(() => ({}));
      throw new Error(body.message || `HTTP error! status: ${res.status}`);
    }
    const payload = await res.json();
    const keys = payload.api_keys || [];
    // Map _id to id
    return keys.map((entry) => ({ ...entry, id: String(entry._id || entry.id) }));
  } catch (err) {
    if (err.message !== "Authentication failed. Please log in again.") {
      console.error("API Error fetching API keys:", err);
    }
    throw err;
  }
};
|
||||
|
||||
// --- Global AI Function (Add 401 check similarly) ---
|
||||
// --- 全局 AI 函数(类似地添加 401 检查)---
|
||||
// Send project context to the AI Q&A endpoint and return its answer.
// NOTE(review): confirm/create the backend route /api/projects/<id>/ask.
export const askAiAboutProject = async (projectId, context) => {
  console.log(`API: Asking AI about project ${projectId} with context...`);
  const token = getAuthToken();
  if (!token) handleUnauthorized();
  try {
    const res = await fetch(`${API_BASE_URL}/api/projects/${projectId}/ask`, { // Example endpoint
      method: 'POST',
      headers: { 'Authorization': `Bearer ${token}`, 'Content-Type': 'application/json' },
      body: JSON.stringify({ context }),
    });
    if (!res.ok) {
      if (res.status === 401) handleUnauthorized();
      const body = await res.json().catch(() => ({}));
      throw new Error(body.message || `HTTP error! status: ${res.status}`);
    }
    return await res.json(); // Expects { answer: "..." } or similar
  } catch (err) {
    if (err.message !== "Authentication failed. Please log in again.") {
      console.error("API Error asking AI:", err);
    }
    throw err;
  }
};
|
||||
|
||||
|
||||
// Partially update a saved URL via PUT /api/urls/:id.
// `updateData` must be a plain object of the fields to change; the backend
// filters non-updatable fields. Resolves with the updated URL object when
// the backend returns one, otherwise with { success: true }.
export const updateUrlDetails = async (urlId, updateData) => {
  console.log(`API: Updating URL ${urlId} with data:`, updateData);
  const token = getAuthToken();
  if (!token) handleUnauthorized(); // Missing token: bail out immediately

  // Reject obviously malformed payloads before hitting the network.
  if (typeof updateData !== 'object' || updateData === null) {
    throw new Error("Invalid update data provided.");
  }

  try {
    const res = await fetch(`${API_BASE_URL}/api/urls/${urlId}`, {
      method: 'PUT', // matches backend/routes/urls.py
      headers: {
        'Authorization': `Bearer ${token}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(updateData), // only the fields to update
    });

    if (!res.ok) {
      if (res.status === 401) handleUnauthorized(); // expired/invalid token
      if (res.status === 404) {
        throw new Error(`URL with ID ${urlId} not found for update.`);
      }
      if (res.status === 400) {
        // Backend-side validation failure (bad data format).
        const body = await res.json().catch(() => ({}));
        throw new Error(body.message || `Invalid data provided for update.`);
      }
      const body = await res.json().catch(() => ({})); // best-effort error message
      throw new Error(body.message || `HTTP error! status: ${res.status}`);
    }

    // Backend returns { message, url? }.
    const payload = await res.json();
    console.log("API: Update URL response:", payload);
    const updated = payload.url;
    // Guarantee a string `id` on the returned URL, whichever field it came in as.
    if (updated && updated._id && !updated.id) {
      updated.id = String(updated._id);
    } else if (updated && updated.id) {
      updated.id = String(updated.id);
    }
    return updated || { success: true };
  } catch (err) {
    // The auth-failure message is already handled; don't double-log it.
    if (err.message !== "Authentication failed. Please log in again.") {
      console.error(`API Error updating details for URL ${urlId}:`, err);
    }
    throw err; // let the caller decide how to surface it
  }
};
|
||||
372
frontend_react/src/services/api_test.js
Normal file
372
frontend_react/src/services/api_test.js
Normal file
@ -0,0 +1,372 @@
|
||||
// services/api.js
|
||||
|
||||
// Base URL for your Flask API - IMPORTANT: Adjust if your backend runs elsewhere
|
||||
const API_BASE_URL = 'http://localhost:5000'; // Assuming Flask runs on port 5000

// Read the JWT saved at login time; null when the user is logged out.
export const getAuthToken = () => localStorage.getItem('authToken');
|
||||
|
||||
|
||||
// --- Login Function ---
|
||||
// Authenticate against POST /api/login and return the backend payload
// (which must include a `token`); throws on HTTP errors or a missing token.
export const loginUser = async (username, password) => {
  console.log('API: Attempting login with username...');
  try {
    const res = await fetch(`${API_BASE_URL}/api/login`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ username, password }),
    });
    const payload = await res.json();
    if (!res.ok) throw new Error(payload.message || `HTTP error! status: ${res.status}`);
    if (!payload.token) {
      throw new Error('Login successful but no token received from server.');
    }
    console.log('API: Login successful, token received.');
    return payload;
  } catch (err) {
    console.error("API Error during login:", err);
    throw err;
  }
};
|
||||
|
||||
|
||||
// --- Placeholder Data (Updated Structures reflecting Schemas) ---
|
||||
// Mock project list returned by fetchProjects(); `id` mirrors the backend _id.
const MOCK_PROJECTS = [
  { id: 'project1', name: 'Project Alpha' },
  { id: 'project2', name: 'Project Beta Research' },
  { id: 'project3', name: 'Competitor Analysis' },
];
|
||||
|
||||
// Mock per-project detail objects keyed by project id, mirroring the backend
// project schema (keywords as {word, percentage} pairs, optional topic/summary).
const MOCK_PROJECT_DETAILS = {
  project1: {
    id: 'project1', // Corresponds to _id from backend
    name: 'Project Alpha',
    description: 'Initial research for Project Alpha focusing on market trends and existing solutions.',
    // Changed 'wordCloudData' to 'keywords' matching schema
    keywords: [ { word: 'market', percentage: 85.5 }, { word: 'research', percentage: 72.1 }, { word: 'trends', percentage: 60.0 }, { word: 'solutions', percentage: 55.9 }, { word: 'AI', percentage: 50.2 }, { word: 'data', percentage: 45.0 } ],
    topic: 'Market Research', // Optional field from schema
    summary: 'AI-generated summary about market trends for Alpha.', // Optional field from schema
    // Other fields like ownerId, collaborators, createdAt etc. could exist
  },
  project2: {
    id: 'project2',
    name: 'Project Beta Research',
    description: 'Deep dive into technical specifications for Project Beta.',
    keywords: [ { word: 'technical', percentage: 90.0 }, { word: 'specs', percentage: 88.2 }, { word: 'beta', percentage: 75.0 }, { word: 'details', percentage: 70.1 } ],
    topic: 'Technical Specification',
    summary: 'Summary of Project Beta technical details.'
  },
  project3: {
    id: 'project3',
    name: 'Competitor Analysis',
    description: 'Analyzing key competitors in the field.',
    keywords: [ { word: 'competitor', percentage: 92.3 }, { word: 'analysis', percentage: 89.9 }, { word: 'features', percentage: 81.5 }, { word: 'pricing', percentage: 78.0 } ],
    topic: 'Business Strategy',
    summary: 'Analysis summary of main competitors.'
  }
};
|
||||
|
||||
// Mock URL cards keyed by project id. Entries cover the three processing
// states the UI must render: 'completed', 'pending', and 'failed'.
const MOCK_URLS = {
  project1: [
    {
      id: 'url1', // Corresponds to _id
      projectId: 'project1',
      url: 'https://example.com/market-trends',
      title: 'Market Trends Report 2025',
      summary: 'An overview of current market trends and future predictions.',
      // Changed 'topic' to 'keywords' matching schema
      keywords: [{ word: 'trends', percentage: 90.1 }, { word: 'market', percentage: 88.5 }, { word: 'forecast', percentage: 75.3 }],
      processingStatus: 'completed', // Added processingStatus
      favicon: 'https://www.google.com/s2/favicons?sz=16&domain_url=example.com', // Example favicon fetch URL
      starred: false, // Optional field
      note: '', // Optional field
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
      // isLoading: false // Frontend state, not part of backend data model
    },
    {
      id: 'url2',
      projectId: 'project1',
      url: 'https://example.com/ai-solutions',
      title: 'AI Solutions Overview',
      summary: 'Exploring various AI solutions applicable to the industry problems.',
      keywords: [{ word: 'AI', percentage: 95.0 }, { word: 'solutions', percentage: 85.0 }, { word: 'ML', percentage: 80.0 }],
      processingStatus: 'completed',
      favicon: null,
      starred: true,
      note: 'Check this one for implementation details.',
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    },
    {
      id: 'url_pending',
      projectId: 'project1',
      url: 'https://example.com/newly-added',
      title: 'Newly Added Page (Processing...)',
      summary: null, // Summary not yet available
      keywords: [], // Keywords not yet available
      processingStatus: 'pending', // Status is pending
      favicon: null,
      starred: false,
      note: '',
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    },
    {
      id: 'url_failed',
      projectId: 'project1',
      url: 'https://example.com/failed-page',
      title: 'Failed Page Processing',
      summary: null,
      keywords: [],
      processingStatus: 'failed', // Status is failed
      favicon: null,
      starred: false,
      note: 'Processing failed, maybe retry?',
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    },
  ],
  project2: [
    {
      id: 'url3',
      projectId: 'project2',
      url: 'https://example.com/tech-specs-beta',
      title: 'Project Beta Tech Specs',
      summary: 'Detailed technical specifications document.',
      keywords: [{word: 'specification', percentage: 98.0}, {word: 'hardware', percentage: 85.0}],
      processingStatus: 'completed',
      favicon: null,
      starred: false,
      note: '',
      createdAt: new Date().toISOString(),
      updatedAt: new Date().toISOString(),
    },
  ],
  project3: [], // Start with no URLs
};
|
||||
|
||||
// Mock stored API-key records; the actual key material is masked/omitted,
// matching what the real backend would return for listing.
const MOCK_API_KEYS = [
  { _id: 'key1', name: 'Gemini', updatedAt: '2025-04-11T...', createdAt: '...', selected: true /*, key: '****' */ }, // Key masked/omitted
  { _id: 'key2', name: 'OpenAI', updatedAt: '2025-04-10T...', createdAt: '...', selected: false /*, key: '****' */ },
];
|
||||
|
||||
|
||||
// --- API Functions (Temporarily bypassing token checks for debugging) ---
|
||||
|
||||
/**
|
||||
* Fetches the list of projects.
|
||||
*/
|
||||
/**
 * Fetches the list of projects.
 * Mock implementation: resolves with MOCK_PROJECTS after a short delay.
 * Auth check is intentionally disabled while debugging.
 */
export const fetchProjects = async () => {
  console.log('API: Fetching projects...');
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    // Real call will be:
    //   fetch(`${API_BASE_URL}/api/projects`, { headers: { 'Authorization': `Bearer ${token}` } })
    await new Promise((done) => setTimeout(done, 300)); // simulate network latency
    console.log('API: Returning MOCK_PROJECTS');
    return MOCK_PROJECTS;
  } catch (err) {
    console.error("API Error fetching projects:", err);
    throw err;
  }
};
|
||||
|
||||
/**
|
||||
* Fetches details for a specific project.
|
||||
*/
|
||||
/**
 * Fetches details for a specific project.
 * Mock implementation: looks the project up in MOCK_PROJECT_DETAILS and
 * throws when the id is unknown. Auth check disabled while debugging.
 */
export const fetchProjectDetails = async (projectId) => {
  console.log(`API: Fetching details for project ${projectId}...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    // Real call will be:
    //   fetch(`${API_BASE_URL}/api/projects/${projectId}`, { headers: { 'Authorization': `Bearer ${token}` } })
    await new Promise((done) => setTimeout(done, 300)); // simulate network latency
    const record = MOCK_PROJECT_DETAILS[projectId];
    if (!record) throw new Error(`Project with ID ${projectId} not found.`);
    console.log(`API: Returning MOCK_PROJECT_DETAILS for ${projectId}`);
    return record;
  } catch (err) {
    console.error(`API Error fetching details for project ${projectId}:`, err);
    throw err;
  }
};
|
||||
|
||||
/**
|
||||
* Fetches URLs for a specific project.
|
||||
*/
|
||||
/**
 * Fetches URLs for a specific project.
 * Mock implementation: returns the project's MOCK_URLS entry (empty array
 * when unknown). Auth check disabled while debugging.
 */
export const fetchProjectUrls = async (projectId) => {
  console.log(`API: Fetching URLs for project ${projectId}...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    // Real call will be:
    //   fetch(`${API_BASE_URL}/api/projects/${projectId}/urls`, { headers: { 'Authorization': `Bearer ${token}` } })
    await new Promise((done) => setTimeout(done, 400)); // simulate network latency
    console.log(`API: Returning MOCK_URLS for ${projectId}`);
    return MOCK_URLS[projectId] || [];
  } catch (err) {
    console.error(`API Error fetching URLs for project ${projectId}:`, err);
    throw err;
  }
};
|
||||
|
||||
|
||||
/**
|
||||
* Adds a URL to a specific project.
|
||||
* NOTE: This might still fail if backend requires auth, even if frontend bypasses check here.
|
||||
* For mock testing, ensure it returns mock data correctly.
|
||||
*/
|
||||
/**
 * Adds a URL to a specific project.
 * Mock implementation: rejects URLs containing 'fail', otherwise returns a
 * placeholder card in its initial (pending) state. The real endpoint will
 * require auth even though the frontend check is bypassed here.
 */
export const addUrlToProject = async (projectId, url) => {
  console.log(`API: Adding URL ${url} to project ${projectId}...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    // Real call will be:
    //   fetch(`${API_BASE_URL}/api/projects/${projectId}/urls`, { method: 'POST', ... })
    await new Promise((done) => setTimeout(done, 1000)); // simulate processing time
    if (url.includes('fail')) throw new Error("Mock Error: Failed to process URL");
    const newId = `url${Date.now()}`;
    const newCardData = { /* ... create mock data ... */ };
    // ... add to mock list ...
    // ... simulate backend processing ...
    console.log(`API: Returning mock added URL ${newId}`);
    return newCardData; // initial pending state
  } catch (err) {
    console.error("API Error adding URL:", err);
    throw err;
  }
};
|
||||
|
||||
// --- Functions for API Key Management (Assume these are called from a settings page, might need auth bypass too if testing that page) ---
|
||||
|
||||
// Mock: pretend to store an API key for `provider` and resolve with a
// success message after a short delay. Auth check disabled while debugging.
export const addApiKey = async (provider, key) => {
  console.log(`API: Storing API key for provider ${provider}...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    await new Promise((done) => setTimeout(done, 500)); // simulate network latency
    console.log("API: Mock API Key stored successfully.");
    return { message: "API key stored successfully." };
  } catch (err) {
    console.error("API Error adding/updating API key:", err);
    throw err;
  }
};
|
||||
|
||||
// Mock: resolve with the stored API-key list (key material omitted),
// renaming _id to id for the frontend. Auth check disabled while debugging.
export const getApiKeys = async () => {
  console.log('API: Fetching stored API keys...');
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    await new Promise((done) => setTimeout(done, 300)); // simulate network latency
    console.log("API: Returning MOCK_API_KEYS");
    return MOCK_API_KEYS.map((entry) => ({
      id: entry._id,
      name: entry.name,
      selected: entry.selected,
      updatedAt: entry.updatedAt,
    }));
  } catch (err) {
    console.error("API Error fetching API keys:", err);
    throw err;
  }
};
|
||||
|
||||
|
||||
// --- Other API functions (Add similar temporary bypass if needed for debugging them) ---
|
||||
|
||||
// Mock: resolve with a canned AI answer for the project after a delay.
// Auth check disabled while debugging.
export const askAiAboutProject = async (projectId, context) => {
  console.log(`API: Asking AI about project ${projectId} with context...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    await new Promise((done) => setTimeout(done, 1500)); // simulate model latency
    console.log("API: Returning Mock AI Response");
    const answer = `Mock AI Response: Based on context for project ${projectId}, the key themes are X, Y, and Z.`;
    return { answer };
  } catch (err) {
    console.error("API Error asking AI:", err);
    throw err;
  }
};
|
||||
|
||||
// Mock: remove the URL from the in-memory MOCK_URLS list and resolve with
// { success: true }. Auth check disabled while debugging.
export const deleteUrlFromProject = async (projectId, urlId) => {
  console.log(`API: Deleting URL ${urlId} from project ${projectId}...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    await new Promise((done) => setTimeout(done, 300)); // simulate network latency
    const existing = MOCK_URLS[projectId];
    if (existing) {
      MOCK_URLS[projectId] = existing.filter((entry) => entry.id !== urlId);
    }
    console.log(`Mock deletion of ${urlId}`);
    return { success: true };
  } catch (err) {
    console.error(`API Error deleting URL ${urlId}:`, err);
    throw err;
  }
};
|
||||
|
||||
// Mock: pretend to regenerate the AI summary for a URL. The update logic is
// still a stub, so updatedUrlData stays null and this currently always throws.
export const regenerateSummary = async (projectId, urlId) => {
  console.log(`API: Regenerating summary for URL ${urlId} in project ${projectId}...`);
  // Auth bypass (debugging): re-enable getAuthToken() check before release.
  try {
    await new Promise((done) => setTimeout(done, 1800)); // simulate model latency
    let updatedUrlData = null;
    // ... (logic to update mock data) ...
    if (!updatedUrlData) throw new Error("Mock Error: URL not found for regeneration");
    console.log(`API: Returning mock regenerated data for ${urlId}`);
    return updatedUrlData;
  } catch (err) {
    console.error(`API Error regenerating summary for ${urlId}:`, err);
    throw err;
  }
};
|
||||
7
frontend_react/vite.config.js
Normal file
7
frontend_react/vite.config.js
Normal file
@ -0,0 +1,7 @@
|
||||
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// Vite configuration — see https://vite.dev/config/
const config = defineConfig({
  plugins: [react()],
})

export default config
|
||||
1
test/tests_backend/__init__.py
Normal file
1
test/tests_backend/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
|
||||
169
test/tests_backend/test_activity.py
Normal file
169
test/tests_backend/test_activity.py
Normal file
@ -0,0 +1,169 @@
|
||||
import json
|
||||
import pytest
|
||||
from backend.app import create_app
|
||||
|
||||
@pytest.fixture
def client():
    """Yield a Flask test client built from the application factory."""
    flask_app = create_app()
    flask_app.config["TESTING"] = True
    with flask_app.test_client() as test_client:
        yield test_client
|
||||
|
||||
@pytest.fixture
def auth_headers(client):
    """Register a fresh test user and return its Bearer-token auth header."""
    import uuid

    # Unique suffix keeps repeated runs from colliding on username/email.
    suffix = str(uuid.uuid4())[:8]
    username = f"activityuser_{suffix}"
    reg_payload = {
        "username": username,
        "email": f"{username}@example.com",
        "password": "TestPassword123",
    }

    reg_resp = client.post(
        "/api/register", data=json.dumps(reg_payload), content_type="application/json"
    )
    assert reg_resp.status_code == 201, f"Registration failed: {reg_resp.data.decode()}"
    token = json.loads(reg_resp.data)["token"]
    return {"Authorization": f"Bearer {token}"}
|
||||
|
||||
@pytest.fixture
def project_id(client, auth_headers):
    """Create a project owned by the test user and return its ID string.

    Activity logs in these tests are attached to this project.
    """
    new_project = {
        "name": "Activity Test Project",
        "topic": "Log Testing",
        "description": "Project used for testing activity logs.",
    }
    resp = client.post(
        "/api/projects",
        data=json.dumps(new_project),
        content_type="application/json",
        headers=auth_headers,
    )
    assert resp.status_code == 201, f"Project creation failed: {resp.data.decode()}"
    return json.loads(resp.data)["project_id"]
|
||||
|
||||
def test_list_activity_empty(client, auth_headers, project_id):
    """A freshly created project has no activity logs yet."""
    resp = client.get(f"/api/project_activity?projectId={project_id}", headers=auth_headers)
    assert resp.status_code == 200, f"List logs failed: {resp.data.decode()}"
    body = json.loads(resp.data)
    assert "activity_logs" in body
    assert not body["activity_logs"]
|
||||
|
||||
def test_create_activity(client, auth_headers, project_id):
    """Creating an activity log makes it show up in the project's listing."""
    new_log = {
        "projectId": project_id,
        "activityType": "URL added",
        "message": "Added a new URL to the project.",
    }
    created = client.post(
        "/api/project_activity",
        data=json.dumps(new_log),
        content_type="application/json",
        headers=auth_headers,
    )
    assert created.status_code == 201, f"Create activity failed: {created.data.decode()}"
    assert "activity_id" in json.loads(created.data)

    # The new log must appear in the listing for this project.
    listed = client.get(f"/api/project_activity?projectId={project_id}", headers=auth_headers)
    assert listed.status_code == 200
    logs = json.loads(listed.data).get("activity_logs", [])
    assert len(logs) == 1
    assert logs[0]["activityType"] == "URL added"
    assert logs[0]["message"] == "Added a new URL to the project."
|
||||
|
||||
def test_create_activity_invalid_project(client, auth_headers):
    """A malformed projectId yields 400; a well-formed but unknown one yields 404."""
    # Malformed ObjectId string -> 400
    bad_format = {
        "projectId": "not_a_valid_objectid",
        "activityType": "Test",
        "message": "",
    }
    resp1 = client.post(
        "/api/project_activity",
        data=json.dumps(bad_format),
        content_type="application/json",
        headers=auth_headers,
    )
    assert resp1.status_code == 400, f"Expected 400 for invalid projectId format, got {resp1.status_code}"

    # Valid-looking but non-existent ObjectId -> 404
    missing_project = {
        "projectId": "64f3f000000000000000abcd",  # random objectId
        "activityType": "Test",
        "message": "",
    }
    resp2 = client.post(
        "/api/project_activity",
        data=json.dumps(missing_project),
        content_type="application/json",
        headers=auth_headers,
    )
    assert resp2.status_code == 404, f"Expected 404 for non-existent project, got {resp2.status_code}"
|
||||
|
||||
def test_list_activity_pagination(client, auth_headers, project_id):
    """Offset/limit query parameters page through the activity logs."""
    # Seed five logs so there are at least two full pages of size 2.
    for idx in range(5):
        seed = {
            "projectId": project_id,
            "activityType": f"LogType{idx}",
            "message": f"Message {idx}",
        }
        resp = client.post(
            "/api/project_activity",
            data=json.dumps(seed),
            content_type="application/json",
            headers=auth_headers,
        )
        assert resp.status_code == 201

    # First page: limit=2, offset=0
    page1 = client.get(
        f"/api/project_activity?projectId={project_id}&limit=2&offset=0",
        headers=auth_headers,
    )
    assert page1.status_code == 200
    assert len(json.loads(page1.data)["activity_logs"]) == 2

    # Second page: limit=2, offset=2
    page2 = client.get(
        f"/api/project_activity?projectId={project_id}&limit=2&offset=2",
        headers=auth_headers,
    )
    assert len(json.loads(page2.data)["activity_logs"]) == 2
|
||||
|
||||
def test_delete_activity(client, auth_headers, project_id):
    """
    Verify that the project owner can delete an activity log.

    Fix: the final list response's status code is now asserted before its
    body is parsed (the original parsed it unchecked).
    """
    # Create a log to delete.
    payload = {
        "projectId": project_id,
        "activityType": "DeleteCheck",
        "message": "Testing delete."
    }
    create_resp = client.post("/api/project_activity",
                              data=json.dumps(payload),
                              content_type="application/json",
                              headers=auth_headers)
    assert create_resp.status_code == 201
    activity_id = json.loads(create_resp.data)["activity_id"]

    # Delete it.
    del_resp = client.delete(f"/api/project_activity/{activity_id}", headers=auth_headers)
    assert del_resp.status_code == 200, f"Delete log failed: {del_resp.data.decode()}"

    # Confirm it's gone.
    list_resp = client.get(f"/api/project_activity?projectId={project_id}", headers=auth_headers)
    assert list_resp.status_code == 200  # was missing in the original
    data = json.loads(list_resp.data)
    logs_left = [a for a in data["activity_logs"] if a["_id"] == activity_id]
    assert len(logs_left) == 0, "Activity log was not deleted properly."
162
test/tests_backend/test_api_list.py
Normal file
162
test/tests_backend/test_api_list.py
Normal file
@ -0,0 +1,162 @@
|
||||
import json
|
||||
import pytest
|
||||
from backend.app import create_app
|
||||
|
||||
@pytest.fixture
def client():
    """Yield a Flask test client built from the application factory."""
    app = create_app()
    app.config["TESTING"] = True
    with app.test_client() as test_client:
        yield test_client
|
||||
@pytest.fixture
def auth_headers(client):
    """Register a brand-new user and hand back its Authorization header."""
    import uuid

    suffix = str(uuid.uuid4())[:8]
    username = f"apilistuser_{suffix}"
    registration = {
        "username": username,
        "email": f"{username}@example.com",
        "password": "TestPassword123",
    }
    reg_resp = client.post("/api/register", json=registration)
    assert reg_resp.status_code == 201, f"Registration failed: {reg_resp.data.decode()}"
    token = reg_resp.get_json()["token"]
    return {"Authorization": f"Bearer {token}"}
|
||||
def test_list_api_keys_empty(client, auth_headers):
    """A freshly registered user owns no API keys: GET returns an empty list."""
    resp = client.get("/api/api_list", headers=auth_headers)
    assert resp.status_code == 200, f"List keys failed: {resp.data.decode()}"
    body = resp.get_json()
    assert "api_keys" in body
    assert not body["api_keys"]
|
||||
def test_create_api_key(client, auth_headers):
    """Create a 'Gemini' key, then confirm it shows up in the listing."""
    payload = {"name": "Gemini", "key": "gemini-secret-key", "selected": True}
    create_resp = client.post("/api/api_list", json=payload, headers=auth_headers)
    assert create_resp.status_code == 201, f"Create API key failed: {create_resp.data.decode()}"
    assert "api_id" in create_resp.get_json()

    # The listing should now contain exactly the key we just created.
    list_resp = client.get("/api/api_list", headers=auth_headers)
    assert list_resp.status_code == 200
    keys = list_resp.get_json().get("api_keys", [])
    assert len(keys) == 1
    entry = keys[0]
    assert entry["name"] == "Gemini"
    assert entry["key"] == "gemini-secret-key"
    assert entry["selected"] is True
|
||||
def test_create_api_key_duplicate(client, auth_headers):
    """Creating a second key for the same provider name must return 400."""
    first = {"name": "Chatgpt", "key": "chatgpt-key1", "selected": False}
    resp1 = client.post("/api/api_list", json=first, headers=auth_headers)
    assert resp1.status_code == 201

    # Same provider 'name' again => the backend should reject it.
    second = {"name": "Chatgpt", "key": "chatgpt-key2", "selected": True}
    resp2 = client.post("/api/api_list", json=second, headers=auth_headers)
    assert resp2.status_code == 400, f"Expected 400 on duplicate provider, got {resp2.status_code}"
|
||||
def test_create_api_key_invalid_name(client, auth_headers):
    """Provider names outside Gemini/Deepseek/Chatgpt are rejected with 400."""
    bad_payload = {"name": "InvalidProvider", "key": "some-key", "selected": False}
    resp = client.post("/api/api_list", json=bad_payload, headers=auth_headers)
    assert resp.status_code == 400, f"Expected 400 on invalid 'name', got {resp.status_code}"
|
||||
def test_update_api_key(client, auth_headers):
    """
    Update an existing API key's 'key' and 'selected' fields.

    Fix: the listing response's status code is now asserted before its body
    is parsed (the original parsed it unchecked).
    """
    # Create a key to update.
    payload = {
        "name": "Deepseek",
        "key": "deepseek-initial",
        "selected": False
    }
    create_resp = client.post("/api/api_list", data=json.dumps(payload),
                              content_type="application/json", headers=auth_headers)
    assert create_resp.status_code == 201
    api_id = json.loads(create_resp.data)["api_id"]

    # Update it.
    update_payload = {"key": "deepseek-updated", "selected": True}
    update_resp = client.put(f"/api/api_list/{api_id}",
                             data=json.dumps(update_payload),
                             content_type="application/json", headers=auth_headers)
    assert update_resp.status_code == 200, f"Update failed: {update_resp.data.decode()}"

    # List and confirm the new values were persisted.
    list_resp = client.get("/api/api_list", headers=auth_headers)
    assert list_resp.status_code == 200  # was missing in the original
    data = json.loads(list_resp.data)
    updated_entry = next((k for k in data["api_keys"] if k["_id"] == api_id), None)
    assert updated_entry is not None, "Updated key not found in list"
    assert updated_entry["key"] == "deepseek-updated"
    assert updated_entry["selected"] is True
|
||||
def test_delete_api_key(client, auth_headers):
    """
    Delete an API key by _id and confirm it no longer appears in the list.

    Fix: the listing response's status code is now asserted before its body
    is parsed (the original parsed it unchecked).
    """
    payload = {
        "name": "Gemini",
        "key": "gemini-key2",
        "selected": False
    }
    create_resp = client.post("/api/api_list", data=json.dumps(payload),
                              content_type="application/json", headers=auth_headers)
    assert create_resp.status_code == 201
    api_id = json.loads(create_resp.data)["api_id"]

    # Delete the key.
    delete_resp = client.delete(f"/api/api_list/{api_id}", headers=auth_headers)
    assert delete_resp.status_code == 200, f"Deletion failed: {delete_resp.data.decode()}"

    # Confirm it's gone.
    list_resp = client.get("/api/api_list", headers=auth_headers)
    assert list_resp.status_code == 200  # was missing in the original
    data = json.loads(list_resp.data)
    keys_left = [k for k in data["api_keys"] if k["_id"] == api_id]
    assert len(keys_left) == 0, "API key was not deleted properly"
92
test/tests_backend/test_auth.py
Normal file
92
test/tests_backend/test_auth.py
Normal file
@ -0,0 +1,92 @@
|
||||
import json
|
||||
import pytest
|
||||
from backend.app import create_app # Adjust import paths based on your folder structure
|
||||
|
||||
|
||||
@pytest.fixture
def client():
    """Yield a test client; swap MONGO_URI below to target a test database."""
    app = create_app()
    app.config["TESTING"] = True
    # Optionally point at a dedicated test database:
    # app.config["MONGO_URI"] = "your_test_mongodb_connection_string"
    with app.test_client() as test_client:
        yield test_client
|
||||
|
||||
def test_register_and_login(client):
    """Register a user, then log in and check the same user_id comes back."""
    credentials = {"username": "test", "password": "1234"}

    # Registration.
    reg_resp = client.post("/api/register",
                           json=dict(credentials, email="test@example.com"))
    assert reg_resp.status_code == 201
    reg_data = reg_resp.get_json()
    assert "token" in reg_data
    user_id = reg_data["user_id"]

    # Login with the same credentials.
    login_resp = client.post("/api/login", json=credentials)
    assert login_resp.status_code == 200
    login_data = login_resp.get_json()
    assert "token" in login_data
    assert user_id == login_data["user_id"]
||||
|
||||
def test_delete_account(client):
    """Register, log in, then delete the account via DELETE /api/delete_account."""
    creds = {"username": "testuse", "password": "TestPassword123"}

    # Step 1: register a new user.
    reg_resp = client.post("/api/register",
                           json=dict(creds, email="testuse@example.com"))
    assert reg_resp.status_code == 201
    user_id = reg_resp.get_json()["user_id"]

    # Step 2: log in to verify credentials and obtain a fresh token.
    login_resp = client.post("/api/login", json=creds)
    assert login_resp.status_code == 200
    login_data = login_resp.get_json()
    assert "token" in login_data
    assert user_id == login_data["user_id"]

    # Step 3: call the delete endpoint with the Bearer token from login.
    headers = {"Authorization": f"Bearer {login_data['token']}"}
    del_resp = client.delete("/api/delete_account", headers=headers)
    assert del_resp.status_code == 200
    assert "deleted successfully" in del_resp.get_json()["message"]
|
||||
218
test/tests_backend/test_dialog.py
Normal file
218
test/tests_backend/test_dialog.py
Normal file
@ -0,0 +1,218 @@
|
||||
import os
|
||||
import json
|
||||
import pytest
|
||||
import uuid
|
||||
from bson.objectid import ObjectId
|
||||
from backend.app import create_app
|
||||
from backend.extensions import mongo
|
||||
|
||||
# Gemini API key used for live integration tests. Read it from the
# environment so the secret is never committed to version control
# (the original hard-coded a key here while its comment claimed env usage).
VALID_GEMINI_KEY = os.environ.get("GEMINI_API_KEY", "")
||||
|
||||
|
||||
@pytest.fixture
def client():
    """Build the Flask app via create_app() and yield its test client."""
    app = create_app()
    app.config["TESTING"] = True
    with app.test_client() as tc:
        yield tc
|
||||
|
||||
@pytest.fixture
def auth_headers(client):
    """
    Register a fresh test user.

    Returns a dict carrying both the Authorization header value and the
    new user's user_id.
    """
    unique_suffix = str(uuid.uuid4())[:8]
    username = f"test_dialog_{unique_suffix}"
    registration = {
        "username": username,
        "email": f"{username}@example.com",
        "password": "Password123",
    }
    reg_resp = client.post("/api/register", json=registration)
    assert reg_resp.status_code == 201, f"User registration failed: {reg_resp.data.decode()}"
    reg_data = reg_resp.get_json()
    return {"Authorization": f"Bearer {reg_data['token']}", "user_id": reg_data["user_id"]}
|
||||
|
||||
@pytest.fixture
def gemini_api_config(auth_headers):
    """
    Insert a selected Gemini API key document for the test user into the
    api_list collection (using the module-level VALID_GEMINI_KEY), and
    remove it again on teardown.

    Fix: timestamps are now timezone-aware via datetime.now(timezone.utc)
    instead of the deprecated datetime.utcnow().
    """
    from datetime import datetime, timezone

    user_id = ObjectId(auth_headers["user_id"])
    now = datetime.now(timezone.utc)
    api_payload = {
        "uid": user_id,
        "name": "Gemini",
        "key": VALID_GEMINI_KEY,
        "selected": True,
        "createdAt": now,
        "updatedAt": now,
    }
    inserted_result = mongo.db.api_list.insert_one(api_payload)
    yield
    # Teardown: remove the API key document after the test.
    mongo.db.api_list.delete_one({"_id": inserted_result.inserted_id})
|
||||
|
||||
@pytest.fixture
def project_id(client, auth_headers):
    """Create a project owned by the test user and return its ID."""
    hdrs = {"Authorization": auth_headers["Authorization"]}
    payload = {
        "name": "Dialog Test Project",
        "topic": "Integration Testing",
        "description": "Project created for testing dialog endpoints."
    }
    resp = client.post("/api/projects", json=payload, headers=hdrs)
    assert resp.status_code == 201, f"Project creation failed: {resp.data.decode()}"
    return resp.get_json()["project_id"]
|
||||
|
||||
def test_create_dialog_no_start_message(client, auth_headers, gemini_api_config, project_id):
    """Creating a dialog without a start message succeeds and is retrievable."""
    hdrs = {"Authorization": auth_headers["Authorization"]}
    resp = client.post("/api/dialog", json={"projectId": project_id}, headers=hdrs)
    assert resp.status_code == 201, f"Expected 201, got {resp.status_code}, {resp.data.decode()}"
    body = resp.get_json()
    assert "dialog_id" in body

    # Fetch the new session back and confirm it was initialised.
    get_resp = client.get(f"/api/dialog/{body['dialog_id']}", headers=hdrs)
    assert get_resp.status_code == 200, f"Failed to get dialog: {get_resp.data.decode()}"
    assert "sessionStartedAt" in get_resp.get_json()
|
||||
|
||||
def test_create_dialog_with_start_message(client, auth_headers, gemini_api_config, project_id):
    """A dialog created with a start message stores it as the first user message."""
    hdrs = {"Authorization": auth_headers["Authorization"]}
    payload = {
        "projectId": project_id,
        "sessionId": "testSession123",
        "startMessage": "Hello, I need research guidance."
    }
    resp = client.post("/api/dialog", json=payload, headers=hdrs)
    assert resp.status_code == 201, f"Expected 201, got {resp.status_code}, {resp.data.decode()}"
    dialog_id = resp.get_json()["dialog_id"]

    get_resp = client.get(f"/api/dialog/{dialog_id}", headers=hdrs)
    assert get_resp.status_code == 200, f"Failed to retrieve dialog: {get_resp.data.decode()}"
    msgs = get_resp.get_json().get("messages", [])
    assert len(msgs) >= 1, "Expected at least one message in the dialog."
    assert msgs[0]["role"] == "user"
    assert "Hello, I need research guidance." in msgs[0]["content"]
|
||||
|
||||
def test_list_dialogs(client, auth_headers, gemini_api_config, project_id):
    """After creating two dialogs, listing by project returns at least two."""
    hdrs = {"Authorization": auth_headers["Authorization"]}
    for _ in range(2):
        resp = client.post("/api/dialog", json={"projectId": project_id}, headers=hdrs)
        assert resp.status_code == 201

    list_resp = client.get(f"/api/dialog?projectId={project_id}", headers=hdrs)
    assert list_resp.status_code == 200, f"Listing dialogs failed: {list_resp.data.decode()}"
    dialogs = list_resp.get_json().get("dialogs", [])
    assert len(dialogs) >= 2, "Expected at least two dialog sessions."
|
||||
|
||||
def test_send_dialog_message_real_gemini(client, auth_headers, gemini_api_config, project_id):
    """
    Send a message in a dialog session using the vector-based prompt.

    NOTE: this test performs a live call to the Gemini API.
    """
    hdrs = {"Authorization": auth_headers["Authorization"]}

    # Open a fresh dialog session.
    create_resp = client.post("/api/dialog", json={"projectId": project_id}, headers=hdrs)
    assert create_resp.status_code == 201, f"Dialog creation failed: {create_resp.data.decode()}"
    dialog_id = create_resp.get_json()["dialog_id"]

    # Send one user message.
    send_resp = client.post(
        f"/api/dialog/{dialog_id}/send",
        json={"content": "What further research should I pursue based on my current websites?"},
        headers=hdrs)
    assert send_resp.status_code == 200, f"Send message failed: {send_resp.data.decode()}"
    send_data = send_resp.get_json()
    assert "llmResponse" in send_data, "Response missing LLM response."
    print("Gemini LLM response:", send_data["llmResponse"])

    # The dialog should now hold the user message plus the reply.
    get_resp = client.get(f"/api/dialog/{dialog_id}", headers=hdrs)
    assert get_resp.status_code == 200, f"Retrieving dialog failed: {get_resp.data.decode()}"
    messages = get_resp.get_json().get("messages", [])
    assert len(messages) >= 2, "Expected at least two messages after sending (user and system)."
|
||||
|
||||
def test_end_and_delete_session(client, auth_headers, gemini_api_config, project_id):
    """End a dialog session, verify it rejects new messages, then delete it."""
    hdrs = {"Authorization": auth_headers["Authorization"]}

    create_resp = client.post("/api/dialog", json={"projectId": project_id}, headers=hdrs)
    assert create_resp.status_code == 201, f"Dialog creation failed: {create_resp.data.decode()}"
    dialog_id = create_resp.get_json()["dialog_id"]

    # End the session.
    end_resp = client.put(f"/api/dialog/{dialog_id}/end", headers=hdrs)
    assert end_resp.status_code == 200, f"Ending dialog failed: {end_resp.data.decode()}"

    # Sending after the session ended must fail.
    send_resp = client.post(f"/api/dialog/{dialog_id}/send",
                            json={"content": "Trying to send after end."},
                            headers=hdrs)
    assert send_resp.status_code == 400, "Expected error when sending message after ending session."

    # Delete the session and confirm it is gone.
    del_resp = client.delete(f"/api/dialog/{dialog_id}", headers=hdrs)
    assert del_resp.status_code == 200, f"Deleting dialog failed: {del_resp.data.decode()}"
    get_resp = client.get(f"/api/dialog/{dialog_id}", headers=hdrs)
    assert get_resp.status_code == 404, "Expected 404 when retrieving a deleted dialog."
||||
208
test/tests_backend/test_projects.py
Normal file
208
test/tests_backend/test_projects.py
Normal file
@ -0,0 +1,208 @@
|
||||
import json
|
||||
import uuid
|
||||
import pytest
|
||||
from backend.app import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def client():
    """Provide a Flask test client for the project endpoints."""
    app = create_app()
    app.config["TESTING"] = True
    with app.test_client() as test_client:
        yield test_client
|
||||
|
||||
@pytest.fixture
def auth_headers(client):
    """Register a uniquely named user and return its Bearer auth header."""
    suffix = str(uuid.uuid4())[:8]
    username = f"projtest_{suffix}"
    payload = {
        "username": username,
        "email": f"{username}@example.com",
        "password": "Password123",
    }
    response = client.post("/api/register", json=payload)
    assert response.status_code == 201, f"Registration failed: {response.data.decode()}"
    token = response.get_json()["token"]
    return {"Authorization": f"Bearer {token}"}
|
||||
|
||||
def test_create_project(client, auth_headers):
    """Creating a project returns its id together with a passkey."""
    payload = {
        "name": "Test Project",
        "topic": "Testing",
        "description": "A project for testing purposes."
    }
    response = client.post("/api/projects", json=payload, headers=auth_headers)
    assert response.status_code == 201, f"Create project failed: {response.data.decode()}"
    body = response.get_json()
    assert "project_id" in body
    assert "passkey" in body
|
||||
|
||||
def test_get_projects(client, auth_headers):
    """After creating two projects, GET /api/projects lists at least two."""
    response1 = client.post("/api/projects",
                            json={"name": "Test Project One", "description": "First test project."},
                            headers=auth_headers)
    response2 = client.post("/api/projects",
                            json={"name": "Test Project Two", "description": "Second test project."},
                            headers=auth_headers)
    assert response1.status_code == 201, f"Project one creation failed: {response1.data.decode()}"
    assert response2.status_code == 201, f"Project two creation failed: {response2.data.decode()}"

    response = client.get("/api/projects", headers=auth_headers)
    assert response.status_code == 200, f"Get projects failed: {response.data.decode()}"
    body = response.get_json()
    assert "projects" in body
    assert isinstance(body["projects"], list)
    assert len(body["projects"]) >= 2
|
||||
|
||||
def test_get_project_detail(client, auth_headers):
    """A created project can be fetched by id and carries its fields."""
    create_resp = client.post(
        "/api/projects",
        json={"name": "Detail Project1111", "description": "A project for detail testing."},
        headers=auth_headers)
    assert create_resp.status_code == 201, f"Creation failed: {create_resp.data.decode()}"
    project_id = create_resp.get_json()["project_id"]

    detail_resp = client.get(f"/api/projects/{project_id}", headers=auth_headers)
    assert detail_resp.status_code == 200, f"Get detail failed: {detail_resp.data.decode()}"
    detail = detail_resp.get_json()
    assert detail.get("name") == "Detail Project1111"
    assert "updatedAt" in detail
|
||||
|
||||
def test_update_project(client, auth_headers):
    """
    Update a project's description and topic, then confirm persistence.

    Fix: the detail response's status code is now asserted before its body
    is parsed (the original parsed it unchecked).
    """
    payload = {"name": "Update Project", "description": "Initial description."}
    create_resp = client.post("/api/projects", data=json.dumps(payload),
                              content_type="application/json", headers=auth_headers)
    assert create_resp.status_code == 201, f"Creation failed: {create_resp.data.decode()}"
    project_id = json.loads(create_resp.data)["project_id"]

    update_payload = {"description": "Updated description.", "topic": "Updated Topic"}
    update_resp = client.put(f"/api/projects/{project_id}", data=json.dumps(update_payload),
                             content_type="application/json", headers=auth_headers)
    assert update_resp.status_code == 200, f"Update failed: {update_resp.data.decode()}"

    detail_resp = client.get(f"/api/projects/{project_id}", headers=auth_headers)
    assert detail_resp.status_code == 200  # was missing in the original
    detail_data = json.loads(detail_resp.data)
    assert detail_data.get("description") == "Updated description."
    assert detail_data.get("topic") == "Updated Topic"
|
||||
|
||||
def test_delete_project(client, auth_headers):
    """Deleting a project makes subsequent detail fetches return 404."""
    create_resp = client.post(
        "/api/projects",
        json={"name": "Delete Project", "description": "Project to be deleted."},
        headers=auth_headers)
    assert create_resp.status_code == 201, f"Creation failed: {create_resp.data.decode()}"
    project_id = create_resp.get_json()["project_id"]

    delete_resp = client.delete(f"/api/projects/{project_id}", headers=auth_headers)
    assert delete_resp.status_code == 200, f"Deletion failed: {delete_resp.data.decode()}"
    detail_resp = client.get(f"/api/projects/{project_id}", headers=auth_headers)
    assert detail_resp.status_code == 404, "Deleted project still accessible"
|
||||
|
||||
def test_get_project_summaries(client, auth_headers):
    """Each entry from /api/projects/summary exposes id, name and updatedAt."""
    # Make sure at least one project exists before fetching summaries.
    create_resp = client.post(
        "/api/projects",
        json={"name": "Summary Project", "description": "Project for summary test."},
        headers=auth_headers)
    assert create_resp.status_code == 201, f"Project creation failed: {create_resp.data.decode()}"

    response = client.get("/api/projects/summary", headers=auth_headers)
    assert response.status_code == 200, f"Summary fetch failed: {response.data.decode()}"
    body = response.get_json()
    assert "projects" in body
    for summary in body["projects"]:
        for field in ("project_id", "name", "updatedAt"):
            assert field in summary
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# New Tests
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
def test_get_project_info(client, auth_headers):
    """/api/projects/<id>/info returns name, topic, description, keywords, summary."""
    create_resp = client.post(
        "/api/projects",
        json={
            "name": "Info Project",
            "topic": "InfoTopic",
            "description": "Project with an info endpoint."
        },
        headers=auth_headers)
    assert create_resp.status_code == 201, f"Project creation failed: {create_resp.data.decode()}"
    project_id = create_resp.get_json()["project_id"]

    info_resp = client.get(f"/api/projects/{project_id}/info", headers=auth_headers)
    assert info_resp.status_code == 200, f"Get project info failed: {info_resp.data.decode()}"
    info = info_resp.get_json()

    assert info.get("name") == "Info Project"
    assert info.get("topic") == "InfoTopic"
    assert info.get("description") == "Project with an info endpoint."
    assert isinstance(info.get("keywords", []), list)
    assert "summary" in info
|
||||
|
||||
def test_recalc_project_keywords(client, auth_headers):
    """
    Exercise /api/projects/<project_id>/recalc_keywords: URL keywords are
    summed per word, sorted, truncated to the top 20 and stored on the project.
    """
    # 1) Create a project.
    create_resp = client.post(
        "/api/projects",
        json={"name": "Keyword Recalc Project", "description": "Test for recalc keywords."},
        headers=auth_headers)
    assert create_resp.status_code == 201, f"Project creation failed: {create_resp.data.decode()}"
    project_id = create_resp.get_json()["project_id"]

    # 2) Attach two URLs carrying overlapping keyword sets.
    url_payload1 = {
        "url": "https://url1.com",
        "keywords": [{"word": "alpha", "percentage": 50}, {"word": "beta", "percentage": 10}],
    }
    url_payload2 = {
        "url": "https://url2.com",
        "keywords": [{"word": "alpha", "percentage": 20}, {"word": "gamma", "percentage": 15}],
    }
    resp1 = client.post(f"/api/projects/{project_id}/urls", json=url_payload1, headers=auth_headers)
    resp2 = client.post(f"/api/projects/{project_id}/urls", json=url_payload2, headers=auth_headers)
    assert resp1.status_code == 201, f"URL1 creation failed: {resp1.data.decode()}"
    assert resp2.status_code == 201, f"URL2 creation failed: {resp2.data.decode()}"

    # 3) Recalculate: alpha should aggregate to 50 + 20 = 70.
    recalc_resp = client.put(f"/api/projects/{project_id}/recalc_keywords", headers=auth_headers)
    assert recalc_resp.status_code == 200, f"Keyword recalc failed: {recalc_resp.data.decode()}"
    recalc_data = recalc_resp.get_json()
    assert "keywords" in recalc_data
    top_keywords = recalc_data["keywords"]
    assert len(top_keywords) <= 20
    alpha_kw = next((k for k in top_keywords if k["word"] == "alpha"), None)
    assert alpha_kw is not None, "Expected alpha in combined keywords"
    assert alpha_kw["percentage"] == 70.0

    # 4) Confirm the aggregated keywords were persisted on the project.
    info_resp = client.get(f"/api/projects/{project_id}/info", headers=auth_headers)
    assert info_resp.status_code == 200, f"Get project info after recalc failed: {info_resp.data.decode()}"
    project_keywords = info_resp.get_json().get("keywords", [])
    alpha_in_project = next((k for k in project_keywords if k["word"] == "alpha"), None)
    assert alpha_in_project is not None, "Project keywords missing alpha"
    assert alpha_in_project["percentage"] == 70.0
219
test/tests_backend/test_urls.py
Normal file
219
test/tests_backend/test_urls.py
Normal file
@ -0,0 +1,219 @@
|
||||
import json
|
||||
import pytest
|
||||
from unittest.mock import patch, ANY
|
||||
from backend.app import create_app
|
||||
|
||||
@pytest.fixture
def client():
    """Yield a Flask test client for the URL endpoints."""
    app = create_app()
    app.config["TESTING"] = True
    with app.test_client() as test_client:
        yield test_client
|
||||
@pytest.fixture
def auth_headers(client):
    """Register a unique throwaway user and return its Bearer auth header."""
    import uuid

    suffix = str(uuid.uuid4())[:8]
    username = f"urltester_{suffix}"
    registration = {
        "username": username,
        "email": f"{username}@example.com",
        "password": "Password123",
    }
    reg_resp = client.post("/api/register", json=registration)
    assert reg_resp.status_code == 201, f"User registration failed: {reg_resp.data.decode()}"
    token = reg_resp.get_json()["token"]
    return {"Authorization": f"Bearer {token}"}
|
||||
@pytest.fixture
def project_id(client, auth_headers):
    """
    Creates a project for the user so we can attach URLs to it.
    Returns the project ID as a string.
    """
    body = json.dumps({"name": "URLs Project", "description": "Project for URL tests."})
    response = client.post(
        "/api/projects",
        data=body,
        content_type="application/json",
        headers=auth_headers,
    )
    assert response.status_code == 201, f"Project creation failed: {response.data.decode()}"
    return json.loads(response.data)["project_id"]
||||
|
||||
def test_create_url(client, auth_headers, project_id):
    """Creating a URL inside a project returns 201 and exposes a url_id."""
    body = json.dumps({
        "url": "https://example.com",
        "title": "Example Site",
        "note": "Some personal note.",
    })
    response = client.post(
        f"/api/projects/{project_id}/urls",
        data=body,
        content_type="application/json",
        headers=auth_headers,
    )
    assert response.status_code == 201, f"Create URL failed: {response.data.decode()}"
    assert "url_id" in json.loads(response.data)
|
||||
def test_list_urls(client, auth_headers, project_id):
    """Listing a project's URLs returns every URL previously added to it."""
    # Create two URLs; the label feeds the failure message for each.
    payloads = (
        ("First", {"url": "https://first-url.com", "title": "First URL"}),
        ("Second", {"url": "https://second-url.com", "title": "Second URL"}),
    )
    for label, payload in payloads:
        response = client.post(
            f"/api/projects/{project_id}/urls",
            data=json.dumps(payload),
            content_type="application/json",
            headers=auth_headers,
        )
        assert response.status_code == 201, f"{label} URL creation failed: {response.data.decode()}"

    # Now list them and confirm both are present.
    listing = client.get(f"/api/projects/{project_id}/urls", headers=auth_headers)
    assert listing.status_code == 200, f"List URLs failed: {listing.data.decode()}"
    listed = json.loads(listing.data)
    assert "urls" in listed
    assert len(listed["urls"]) >= 2
||||
|
||||
def test_get_url_detail(client, auth_headers, project_id):
    """Fetching a single URL by id returns its stored fields."""
    create_resp = client.post(
        f"/api/projects/{project_id}/urls",
        data=json.dumps({"url": "https://detail-url.com", "title": "Detail URL"}),
        content_type="application/json",
        headers=auth_headers,
    )
    assert create_resp.status_code == 201
    url_id = json.loads(create_resp.data)["url_id"]

    detail_resp = client.get(f"/api/urls/{url_id}", headers=auth_headers)
    assert detail_resp.status_code == 200, f"Get URL detail failed: {detail_resp.data.decode()}"
    detail = json.loads(detail_resp.data)
    assert detail.get("title") == "Detail URL"
||||
|
||||
def test_update_url(client, auth_headers, project_id):
    """Updating a URL's fields persists the new title, starred flag, and note."""
    create_resp = client.post(
        f"/api/projects/{project_id}/urls",
        data=json.dumps({"url": "https://update-url.com", "title": "ToBeUpdated"}),
        content_type="application/json",
        headers=auth_headers,
    )
    assert create_resp.status_code == 201
    url_id = json.loads(create_resp.data)["url_id"]

    changes = {"title": "Updated Title", "starred": True, "note": "Updated note."}
    update_resp = client.put(
        f"/api/urls/{url_id}",
        data=json.dumps(changes),
        content_type="application/json",
        headers=auth_headers,
    )
    assert update_resp.status_code == 200, f"Update URL failed: {update_resp.data.decode()}"

    # Re-fetch and confirm each updated field round-tripped.
    detail = json.loads(client.get(f"/api/urls/{url_id}", headers=auth_headers).data)
    assert detail.get("title") == "Updated Title"
    assert detail.get("starred") is True
    assert detail.get("note") == "Updated note."
||||
|
||||
@patch("backend.routes.urls.async_extract_title_and_keywords.delay")
def test_extract_title_and_keywords(mock_task_delay, client, auth_headers, project_id):
    """
    Test the asynchronous title/keyword extraction. We mock the Celery task's .delay() call.
    """
    create_resp = client.post(
        f"/api/projects/{project_id}/urls",
        data=json.dumps({"url": "https://mock-url.com"}),
        content_type="application/json",
        headers=auth_headers,
    )
    assert create_resp.status_code == 201
    url_id = json.loads(create_resp.data)["url_id"]

    # The endpoint only queues a Celery task, so 202 Accepted is expected.
    extract_resp = client.put(f"/api/urls/{url_id}/extract_title_and_keywords", headers=auth_headers)
    assert extract_resp.status_code == 202, f"Extraction queueing failed: {extract_resp.data.decode()}"

    # The task must have been dispatched exactly once for this URL.
    mock_task_delay.assert_called_once_with(url_id, ANY)
||||
|
||||
@patch("backend.routes.urls.async_summarize_url.delay")
def test_summarize_url(mock_task_delay, client, auth_headers, project_id):
    """
    Test the asynchronous summarization by mocking the Celery task call.
    """
    create_resp = client.post(
        f"/api/projects/{project_id}/urls",
        data=json.dumps({"url": "https://mock-summary.com"}),
        content_type="application/json",
        headers=auth_headers,
    )
    assert create_resp.status_code == 201
    url_id = json.loads(create_resp.data)["url_id"]

    # Summarization is queued asynchronously -> 202 Accepted.
    summarize_resp = client.put(f"/api/urls/{url_id}/summarize", headers=auth_headers)
    assert summarize_resp.status_code == 202, f"Summarization queueing failed: {summarize_resp.data.decode()}"

    mock_task_delay.assert_called_once_with(url_id, ANY)
||||
|
||||
def test_search_urls(client, auth_headers, project_id):
    """Searching a project matches URLs by note text or keyword words."""
    # One URL matches 'alpha' via keywords, the other via its note text.
    candidates = [
        {
            "url": "https://search-url1.com",
            "note": "Unique note text",
            "keywords": [{"word": "alpha", "percentage": 90}],
        },
        {
            "url": "https://search-url2.com",
            "note": "Another note containing alpha",
            "keywords": [{"word": "beta", "percentage": 50}],
        },
    ]
    for candidate in candidates:
        response = client.post(
            f"/api/projects/{project_id}/urls",
            data=json.dumps(candidate),
            content_type="application/json",
            headers=auth_headers,
        )
        assert response.status_code == 201

    search_resp = client.get(f"/api/projects/{project_id}/search?q=alpha", headers=auth_headers)
    assert search_resp.status_code == 200, f"Search failed: {search_resp.data.decode()}"
    results = json.loads(search_resp.data).get("results", [])
    # Both URLs mention 'alpha', so both should be hits.
    assert len(results) >= 2
||||
|
||||
def test_delete_url(client, auth_headers, project_id):
    """Deleting a URL removes it; a subsequent fetch yields 404."""
    create_resp = client.post(
        f"/api/projects/{project_id}/urls",
        data=json.dumps({"url": "https://delete-url.com", "title": "Delete-Me"}),
        content_type="application/json",
        headers=auth_headers,
    )
    assert create_resp.status_code == 201
    url_id = json.loads(create_resp.data)["url_id"]

    delete_resp = client.delete(f"/api/urls/{url_id}", headers=auth_headers)
    assert delete_resp.status_code == 200, f"Deletion failed: {delete_resp.data.decode()}"

    # Confirm it's gone.
    detail_resp = client.get(f"/api/urls/{url_id}", headers=auth_headers)
    assert detail_resp.status_code == 404, "URL is still accessible after deletion."
|
||||
Loading…
x
Reference in New Issue
Block a user