From 2be3d00ac4a2684afa9d50fb4b9122a113117094 Mon Sep 17 00:00:00 2001 From: ldy Date: Mon, 9 Jun 2025 17:53:19 +0800 Subject: [PATCH] Initial Commit --- README.md | 3 +- backend_flask/.flaskenv | 3 + backend_flask/__init__.py | 0 backend_flask/celery_worker/__init__.py | 0 backend_flask/celery_worker/celery_app.py | 329 ++ backend_flask/flask/bin/Activate.ps1 | 247 ++ backend_flask/flask/bin/activate | 70 + backend_flask/flask/bin/activate.csh | 27 + backend_flask/flask/bin/activate.fish | 69 + backend_flask/flask/bin/celery | 8 + backend_flask/flask/bin/dotenv | 8 + backend_flask/flask/bin/f2py | 8 + backend_flask/flask/bin/flask | 8 + backend_flask/flask/bin/httpx | 8 + backend_flask/flask/bin/jsondiff | 41 + backend_flask/flask/bin/jsonpatch | 107 + backend_flask/flask/bin/jsonpointer | 67 + backend_flask/flask/bin/langchain-server | 8 + backend_flask/flask/bin/normalizer | 8 + backend_flask/flask/bin/pip | 8 + backend_flask/flask/bin/pip3 | 8 + backend_flask/flask/bin/pip3.12 | 8 + backend_flask/flask/bin/py.test | 8 + backend_flask/flask/bin/pyrsa-decrypt | 8 + backend_flask/flask/bin/pyrsa-encrypt | 8 + backend_flask/flask/bin/pyrsa-keygen | 8 + backend_flask/flask/bin/pyrsa-priv2pub | 8 + backend_flask/flask/bin/pyrsa-sign | 8 + backend_flask/flask/bin/pyrsa-verify | 8 + backend_flask/flask/bin/pytest | 8 + backend_flask/flask/bin/python | 1 + backend_flask/flask/bin/python3 | 1 + backend_flask/flask/bin/python3.12 | 1 + backend_flask/flask/bin/tqdm | 8 + .../site/python3.12/greenlet/greenlet.h | 164 + backend_flask/flask/lib64 | 1 + backend_flask/flask/pyvenv.cfg | 5 + backend_flask/myapp/__init__.py | 123 + backend_flask/myapp/activity/__init__.py | 11 + .../myapp/activity/activity_routes.py | 297 ++ backend_flask/myapp/ai_services/__init__.py | 13 + backend_flask/myapp/ai_services/ai_routes.py | 337 ++ backend_flask/myapp/auth/__init__.py | 13 + backend_flask/myapp/auth/auth_routes.py | 444 +++ backend_flask/myapp/config.py | 31 + backend_flask/myapp/dialog/__init__.py | 13 + backend_flask/myapp/dialog/dialog_routes.py | 787 +++++ backend_flask/myapp/extensions.py | 24 + backend_flask/myapp/models.py | 4 + backend_flask/myapp/projects/__init__.py | 13 + .../myapp/projects/projects_routes.py | 715 +++++ backend_flask/myapp/schemas.py | 227 ++ backend_flask/myapp/urls/__init__.py | 14 + backend_flask/myapp/urls/urls_routes.py | 817 +++++ backend_flask/myapp/utils.py | 93 + backend_flask/requirements.txt | 104 + backend_flask/run.py | 32 + backend_flask/test_auth_init.py | 51 + frontend_react/.Rhistory | 0 frontend_react/.gitignore | 24 + frontend_react/README.md | 12 + frontend_react/eslint.config.js | 33 + frontend_react/index.html | 13 + frontend_react/package-lock.json | 2825 +++++++++++++++++ frontend_react/package.json | 31 + frontend_react/public/vite.svg | 1 + frontend_react/src/App.jsx | 90 + frontend_react/src/App.module.css | 150 + .../components/LeftSidebar/LeftSidebar.jsx | 183 ++ .../LeftSidebar/LeftSidebar.module.css | 213 ++ .../src/components/LoginPage/LoginPage.jsx | 156 + .../components/LoginPage/LoginPage.module.css | 281 ++ .../components/MainContent/MainContent.jsx | 291 ++ .../MainContent/MainContent.module.css | 113 + .../ProjectHeader/ProjectHeader.jsx | 87 + .../ProjectHeader/ProjectHeader.module.css | 96 + .../src/components/UrlCard/UrlCard.jsx | 164 + .../src/components/UrlCard/UrlCard.module.css | 262 ++ .../components/UrlCardList/UrlCardList.jsx | 92 + .../UrlCardList/UrlCardList.module.css | 16 + .../UrlDetailPage/UrlDetailPage.jsx | 308 ++ 
.../UrlDetailPage/UrlDetailPage.module.css | 293 ++ frontend_react/src/components/react.svg | 1 + frontend_react/src/index.css | 82 + frontend_react/src/main.jsx | 10 + frontend_react/src/services/.Rhistory | 0 frontend_react/src/services/api.js | 603 ++++ frontend_react/src/services/api_test.js | 372 +++ frontend_react/vite.config.js | 7 + test/tests_backend/__init__.py | 1 + test/tests_backend/test_activity.py | 169 + test/tests_backend/test_api_list.py | 162 + test/tests_backend/test_auth.py | 92 + test/tests_backend/test_dialog.py | 218 ++ test/tests_backend/test_projects.py | 208 ++ test/tests_backend/test_urls.py | 219 ++ 96 files changed, 13327 insertions(+), 2 deletions(-) create mode 100644 backend_flask/.flaskenv create mode 100644 backend_flask/__init__.py create mode 100644 backend_flask/celery_worker/__init__.py create mode 100644 backend_flask/celery_worker/celery_app.py create mode 100644 backend_flask/flask/bin/Activate.ps1 create mode 100644 backend_flask/flask/bin/activate create mode 100644 backend_flask/flask/bin/activate.csh create mode 100644 backend_flask/flask/bin/activate.fish create mode 100644 backend_flask/flask/bin/celery create mode 100644 backend_flask/flask/bin/dotenv create mode 100644 backend_flask/flask/bin/f2py create mode 100644 backend_flask/flask/bin/flask create mode 100644 backend_flask/flask/bin/httpx create mode 100644 backend_flask/flask/bin/jsondiff create mode 100644 backend_flask/flask/bin/jsonpatch create mode 100644 backend_flask/flask/bin/jsonpointer create mode 100644 backend_flask/flask/bin/langchain-server create mode 100644 backend_flask/flask/bin/normalizer create mode 100644 backend_flask/flask/bin/pip create mode 100644 backend_flask/flask/bin/pip3 create mode 100644 backend_flask/flask/bin/pip3.12 create mode 100644 backend_flask/flask/bin/py.test create mode 100644 backend_flask/flask/bin/pyrsa-decrypt create mode 100644 backend_flask/flask/bin/pyrsa-encrypt create mode 100644 backend_flask/flask/bin/pyrsa-keygen create mode 100644 backend_flask/flask/bin/pyrsa-priv2pub create mode 100644 backend_flask/flask/bin/pyrsa-sign create mode 100644 backend_flask/flask/bin/pyrsa-verify create mode 100644 backend_flask/flask/bin/pytest create mode 100644 backend_flask/flask/bin/python create mode 100644 backend_flask/flask/bin/python3 create mode 100644 backend_flask/flask/bin/python3.12 create mode 100644 backend_flask/flask/bin/tqdm create mode 100644 backend_flask/flask/include/site/python3.12/greenlet/greenlet.h create mode 100644 backend_flask/flask/lib64 create mode 100644 backend_flask/flask/pyvenv.cfg create mode 100644 backend_flask/myapp/__init__.py create mode 100644 backend_flask/myapp/activity/__init__.py create mode 100644 backend_flask/myapp/activity/activity_routes.py create mode 100644 backend_flask/myapp/ai_services/__init__.py create mode 100644 backend_flask/myapp/ai_services/ai_routes.py create mode 100644 backend_flask/myapp/auth/__init__.py create mode 100644 backend_flask/myapp/auth/auth_routes.py create mode 100644 backend_flask/myapp/config.py create mode 100644 backend_flask/myapp/dialog/__init__.py create mode 100644 backend_flask/myapp/dialog/dialog_routes.py create mode 100644 backend_flask/myapp/extensions.py create mode 100644 backend_flask/myapp/models.py create mode 100644 backend_flask/myapp/projects/__init__.py create mode 100644 backend_flask/myapp/projects/projects_routes.py create mode 100644 backend_flask/myapp/schemas.py create mode 100644 backend_flask/myapp/urls/__init__.py create mode 100644 
backend_flask/myapp/urls/urls_routes.py create mode 100644 backend_flask/myapp/utils.py create mode 100644 backend_flask/requirements.txt create mode 100644 backend_flask/run.py create mode 100644 backend_flask/test_auth_init.py create mode 100644 frontend_react/.Rhistory create mode 100644 frontend_react/.gitignore create mode 100644 frontend_react/README.md create mode 100644 frontend_react/eslint.config.js create mode 100644 frontend_react/index.html create mode 100644 frontend_react/package-lock.json create mode 100644 frontend_react/package.json create mode 100644 frontend_react/public/vite.svg create mode 100644 frontend_react/src/App.jsx create mode 100644 frontend_react/src/App.module.css create mode 100644 frontend_react/src/components/LeftSidebar/LeftSidebar.jsx create mode 100644 frontend_react/src/components/LeftSidebar/LeftSidebar.module.css create mode 100644 frontend_react/src/components/LoginPage/LoginPage.jsx create mode 100644 frontend_react/src/components/LoginPage/LoginPage.module.css create mode 100644 frontend_react/src/components/MainContent/MainContent.jsx create mode 100644 frontend_react/src/components/MainContent/MainContent.module.css create mode 100644 frontend_react/src/components/ProjectHeader/ProjectHeader.jsx create mode 100644 frontend_react/src/components/ProjectHeader/ProjectHeader.module.css create mode 100644 frontend_react/src/components/UrlCard/UrlCard.jsx create mode 100644 frontend_react/src/components/UrlCard/UrlCard.module.css create mode 100644 frontend_react/src/components/UrlCardList/UrlCardList.jsx create mode 100644 frontend_react/src/components/UrlCardList/UrlCardList.module.css create mode 100644 frontend_react/src/components/UrlDetailPage/UrlDetailPage.jsx create mode 100644 frontend_react/src/components/UrlDetailPage/UrlDetailPage.module.css create mode 100644 frontend_react/src/components/react.svg create mode 100644 frontend_react/src/index.css create mode 100644 frontend_react/src/main.jsx create mode 100644 frontend_react/src/services/.Rhistory create mode 100644 frontend_react/src/services/api.js create mode 100644 frontend_react/src/services/api_test.js create mode 100644 frontend_react/vite.config.js create mode 100644 test/tests_backend/__init__.py create mode 100644 test/tests_backend/test_activity.py create mode 100644 test/tests_backend/test_api_list.py create mode 100644 test/tests_backend/test_auth.py create mode 100644 test/tests_backend/test_dialog.py create mode 100644 test/tests_backend/test_projects.py create mode 100644 test/tests_backend/test_urls.py diff --git a/README.md b/README.md index 277b1f5..5514b74 100644 --- a/README.md +++ b/README.md @@ -1,2 +1 @@ -# SurfSmart - +# SurfSmart \ No newline at end of file diff --git a/backend_flask/.flaskenv b/backend_flask/.flaskenv new file mode 100644 index 0000000..64dcc5e --- /dev/null +++ b/backend_flask/.flaskenv @@ -0,0 +1,3 @@ +FLASK_APP="myapp:create_app()" +FLASK_ENV="development" +PYTHONPATH=. 
\ No newline at end of file diff --git a/backend_flask/__init__.py b/backend_flask/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend_flask/celery_worker/__init__.py b/backend_flask/celery_worker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend_flask/celery_worker/celery_app.py b/backend_flask/celery_worker/celery_app.py new file mode 100644 index 0000000..bae061a --- /dev/null +++ b/backend_flask/celery_worker/celery_app.py @@ -0,0 +1,329 @@ +import datetime +import re +import string +from collections import Counter, defaultdict + +import requests +from requests.exceptions import Timeout, RequestException, ConnectionError, HTTPError +from bs4 import BeautifulSoup +from bson.objectid import ObjectId, InvalidId +from celery import Celery + +import pymongo +from ..myapp.config import Config + +# --- Database Connection Setup for Celery --- +mongo_client = None +mongo_db = None + + +def get_db(): + """ + Helper to get a dedicated DB connection for Celery tasks. + """ + global mongo_client, mongo_db + if mongo_db is None: + try: + print("Task: Initializing new MongoDB client connection...") + mongo_client = pymongo.MongoClient(Config.MONGO_URI, serverSelectionTimeoutMS=5000) + mongo_client.admin.command('ismaster') + mongo_db = mongo_client.get_database() + print(f"Task: MongoDB connection successful. Using database: {mongo_db.name}") + except Exception as e: + print(f"FATAL Task Error: Could not connect to MongoDB: {e}") + mongo_db = None + raise RuntimeError(f"Database connection failed in Celery task: {e}") + return mongo_db + + +# --- Celery configuration --- +celery = Celery("my_celery_app") +celery.conf.broker_url = Config.CELERY_BROKER_URL +celery.conf.result_backend = Config.CELERY_RESULT_BACKEND + +REQUEST_TIMEOUT = 15 + +# --- Text Cleaning Setup --- +PUNCT_REGEX = re.compile(rf"[{re.escape(string.punctuation)}]+") +STOPWORDS = { + "the", "is", "a", "an", "of", "to", "and", "for", "on", "in", "that", "it", "with", "as", "at", "be", + "this", "are", "was", "were", "will", "would", "or", "so", "if", "then", "from", "not", "by", "we", "you", + "i", "he", "she", "they", "have", "has", "had", "but", "too", "some", "any", "my", "your", "their", "me" +} + + +def clean_and_tokenize(text): + """ + Performs minimal cleaning by splitting the text. + For English this works reasonably; for Chinese you may use a segmentation library like jieba. + """ + if not text: + return [] + return text.split() + + +# -------------------------- +# Task: Asynchronous Title and Keyword Extraction +# -------------------------- +@celery.task(bind=True, max_retries=1, default_retry_delay=10) +def async_extract_title_and_keywords(self, url_id_str, user_id_str): + """ + Fetches the webpage, extracts the title and computes the top 20 keywords from its body text. + Updates the URL document with the new title and keywords. + """ + print(f"Task: Starting title/keyword extraction for URL ID: {url_id_str}") + try: + db = get_db() + url_obj_id = ObjectId(url_id_str) + except InvalidId: + print(f"Task Error: Invalid URL ID format: {url_id_str}") + return "Invalid URL ID format." + except Exception as e: + print(f"Task Error: Could not initialize DB or ObjectId: {e}") + return f"DB/ObjectId Error: {e}" + + if db is None: + print(f"Task Error: DB connection is None for URL ID: {url_id_str}") + return "DB connection error." 
+
+    try:
+        url_doc = db.urls.find_one({"_id": url_obj_id})
+        if not url_doc:
+            print(f"Task Error: URL doc not found for ID: {url_id_str}")
+            return "URL doc not found."
+
+        page_url = url_doc.get("url", "")
+        if not page_url:
+            print(f"Task Error: No URL found in doc: {url_id_str}")
+            db.urls.update_one({"_id": url_obj_id},
+                               {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
+            return "No URL found in doc."
+
+        page_title = ""
+        keywords_list = []
+        status_to_set = "failed"
+
+        try:
+            print(f"Task: Fetching URL: {page_url} with timeout={REQUEST_TIMEOUT}")
+            headers = {'User-Agent': 'Mozilla/5.0 (compatible; SurfSmartBot/1.0; +http://example.com/bot)'}
+            r = requests.get(page_url, timeout=REQUEST_TIMEOUT, headers=headers, allow_redirects=True)
+            r.raise_for_status()
+            soup = BeautifulSoup(r.text, "html.parser")
+            page_title = soup.title.string.strip() if soup.title and soup.title.string else url_doc.get("title", "")
+            body_text = soup.body.get_text(" ", strip=True) if soup.body else ""
+            tokens = clean_and_tokenize(body_text)
+            if tokens:
+                counter = Counter(tokens)
+                top_20 = counter.most_common(20)
+                total_count = sum(count for _, count in top_20)
+                for word, count in top_20:
+                    perc = round((count / total_count) * 100, 2) if total_count > 0 else 0
+                    keywords_list.append({"word": word, "percentage": perc})
+            status_to_set = "completed"
+            print(
+                f"Task: Extraction completed for URL {url_id_str}. Title: '{page_title}', Keywords count: {len(keywords_list)}")
+        except Timeout:
+            print(f"Task Error: Request timed out for URL: {page_url}")
+        except ConnectionError:
+            print(f"Task Error: Connection error for URL: {page_url}")
+        except HTTPError as http_err:
+            print(f"Task Error: HTTP error occurred: {http_err} for URL: {page_url}")
+        except RequestException as req_err:
+            print(f"Task Error: Request exception for URL {page_url}: {req_err}")
+        except Exception as e:
+            print(f"Task Error: Unexpected error processing URL {page_url}: {e}")
+            try:
+                self.retry(exc=e)
+            except Exception as retry_err:
+                print(f"Task Error: Retry failed for URL {url_id_str}: {retry_err}")
+
+        update_data = {
+            "processingStatus": status_to_set,
+            "updatedAt": datetime.datetime.utcnow()
+        }
+        if status_to_set == "completed":
+            update_data["title"] = page_title
+            update_data["keywords"] = keywords_list
+
+        db.urls.update_one({"_id": url_obj_id}, {"$set": update_data})
+        print(f"Task: DB updated for URL {url_id_str} with extraction status '{status_to_set}'")
+        return f"OK: Extraction task completed with status {status_to_set}"
+    except Exception as e:
+        print(f"Task Error: Failed during extraction for URL {url_id_str}: {e}")
+        try:
+            db.urls.update_one({"_id": url_obj_id},
+                               {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
+        except Exception:
+            pass
+        return f"Error: Extraction task failed for URL {url_id_str}"
+
+
+# --------------------------
+# Task: Asynchronous Summarization
+# --------------------------
+@celery.task(bind=True, max_retries=1, default_retry_delay=10)
+def async_summarize_url(self, url_id_str, user_id_str, use_gemini):
+    """
+    Fetches webpage content and extracts up to the first 1000 words.
+    If use_gemini is True and a valid Gemini API key is present, builds an effective prompt
+    and calls Gemini to generate a ~300-word summary (under 350 words). Otherwise, truncates the text
+    to around 300 words.
+    Updates the URL document's 'summary' and 'processingStatus' accordingly.
+ """ + print(f"Task: Starting summary generation for URL ID: {url_id_str}") + try: + db = get_db() + url_obj_id = ObjectId(url_id_str) + except InvalidId: + print(f"Task Error: Invalid URL ID format: {url_id_str}") + return "Invalid URL ID format." + except Exception as e: + print(f"Task Error: Could not initialize DB or ObjectId: {e}") + return f"DB/ObjectId Error: {e}" + + if db is None: + print(f"Task Error: DB connection is None for URL ID: {url_id_str}") + return "DB connection error." + + try: + url_doc = db.urls.find_one({"_id": url_obj_id}) + if not url_doc: + print(f"Task Error: URL doc not found for ID: {url_id_str}") + return "URL doc not found." + + page_url = url_doc.get("url", "") + if not page_url: + print(f"Task Error: No URL found in doc: {url_id_str}") + db.urls.update_one({"_id": url_obj_id}, + {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}}) + return "No URL found in doc." + + headers = {'User-Agent': 'Mozilla/5.0 (compatible; SurfSmartBot/1.0; +http://example.com/bot)'} + r = requests.get(page_url, timeout=REQUEST_TIMEOUT, headers=headers, allow_redirects=True) + r.raise_for_status() + soup = BeautifulSoup(r.text, "html.parser") + body_text = soup.body.get_text(" ", strip=True) if soup.body else "" + words_full = body_text.split() # For better language support, integrate a segmentation tool if needed. + text_1000 = " ".join(words_full[:1000]) + + summary_result = "" + if use_gemini: + api_doc = db.api_list.find_one({"uid": ObjectId(user_id_str), "selected": True, "name": "Gemini"}) + if api_doc and api_doc.get("key"): + gemini_key = api_doc.get("key") + prompt = ( + "You are an expert summarizer. Below is text extracted from a webpage. " + "Please generate a concise, high-quality summary of approximately 300 words (but under 350 words). " + "Ensure the summary is in the same language as the input text.\n\n" + + text_1000 + ) + try: + import google.generativeai as genai + from google.api_core import exceptions as google_exceptions + genai.configure(api_key=gemini_key) + GEMINI_MODEL_NAME = 'gemini-1.5-pro-latest' + model = genai.GenerativeModel(GEMINI_MODEL_NAME) + gemini_input = [{"role": "user", "parts": [{"text": prompt}]}] + llm_response = model.generate_content(gemini_input) + summary_result = llm_response.text if llm_response.parts else "" + except Exception as gem_err: + print(f"Task Error: Gemini API error: {gem_err}. Falling back to truncation.") + summary_result = " ".join(text_1000.split()[:300]) + else: + summary_result = " ".join(text_1000.split()[:300]) + else: + summary_result = " ".join(text_1000.split()[:300]) + + status_to_set = "completed" if summary_result.strip() else "failed" + update_data = { + "summary": summary_result, + "processingStatus": status_to_set, + "updatedAt": datetime.datetime.utcnow() + } + db.urls.update_one({"_id": url_obj_id}, {"$set": update_data}) + print( + f"Task: Summary generation for URL {url_id_str} completed with status '{status_to_set}'. 
Word count: {len(summary_result.split())}")
+        return f"OK: Summary task completed with status {status_to_set}"
+    except Timeout:
+        print(f"Task Error: Request timed out for URL: {page_url}")
+    except ConnectionError:
+        print(f"Task Error: Connection error for URL: {page_url}")
+    except HTTPError as http_err:
+        print(f"Task Error: HTTP error occurred: {http_err} for URL: {page_url}")
+    except RequestException as req_err:
+        print(f"Task Error: Request exception for URL {page_url}: {req_err}")
+    except Exception as e:
+        print(f"Task Error: Unexpected error during summarization for URL {page_url}: {e}")
+        try:
+            self.retry(exc=e)
+        except Exception as retry_err:
+            print(f"Task Error: Retry failed for URL {url_id_str}: {retry_err}")
+    try:
+        db.urls.update_one({"_id": url_obj_id},
+                           {"$set": {"processingStatus": "failed", "updatedAt": datetime.datetime.utcnow()}})
+    except Exception:
+        pass
+    return f"Error: Summarization task failed for URL {url_id_str}"
+
+
+# --------------------------
+# Task: Asynchronous Recalculate Project Keywords
+# --------------------------
+@celery.task(bind=True, max_retries=1, default_retry_delay=10)
+def async_recalc_project_keywords(self, project_id, user_id_str):
+    """
+    Recalculates project keywords by summing the percentages from all associated URL documents.
+    Retains the top 20 keywords and updates the project document.
+    """
+    print(f"Task: Starting keywords recalculation for project {project_id}")
+    try:
+        db = get_db()
+        project_obj_id = ObjectId(project_id)
+    except InvalidId:
+        print(f"Task Error: Invalid project ID format: {project_id}")
+        return "Invalid project ID format."
+    except Exception as e:
+        print(f"Task Error: Unable to initialize DB or convert project ID: {e}")
+        return f"DB/ObjectId Error: {e}"
+
+    if db is None:
+        print(f"Task Error: DB connection is None for project {project_id}")
+        return "DB connection error."
+
+    try:
+        cursor = db.urls.find({"projectId": project_obj_id}, {"keywords": 1})
+        combined = defaultdict(float)
+        for doc in cursor:
+            keywords_list = doc.get("keywords", [])
+            if isinstance(keywords_list, list):
+                for kw in keywords_list:
+                    if isinstance(kw, dict):
+                        word = kw.get("word", "").strip()
+                        try:
+                            percentage = float(kw.get("percentage", 0.0))
+                        except (ValueError, TypeError):
+                            percentage = 0.0
+                        if word and isinstance(word, str):
+                            combined[word] += percentage
+                    else:
+                        print(f"Task Warning: Non-dict item in keywords for a URL in project {project_id}")
+            else:
+                print(f"Task Warning: Keywords field is not a list for a URL in project {project_id}")
+
+        sorted_kw = sorted(combined.items(), key=lambda x: x[1], reverse=True)[:20]
+        top_keywords = [{"word": w, "percentage": round(val, 2)} for w, val in sorted_kw]
+
+        update_data = {
+            "keywords": top_keywords,
+            "updatedAt": datetime.datetime.utcnow()
+        }
+        db.projects.update_one({"_id": project_obj_id}, {"$set": update_data})
+        print(f"Task: Keywords recalculation for project {project_id} completed. Top keywords: {top_keywords}")
+        return f"OK: Project keywords recalculated successfully."
+ except Exception as e: + print(f"Task Error: Failed during keywords recalculation for project {project_id}: {e}") + try: + db.projects.update_one({"_id": project_obj_id}, {"$set": {"updatedAt": datetime.datetime.utcnow()}}) + except Exception: + pass + return f"Error: Keywords recalculation failed for project {project_id}" diff --git a/backend_flask/flask/bin/Activate.ps1 b/backend_flask/flask/bin/Activate.ps1 new file mode 100644 index 0000000..eeea358 --- /dev/null +++ b/backend_flask/flask/bin/Activate.ps1 @@ -0,0 +1,247 @@ +<# +.Synopsis +Activate a Python virtual environment for the current PowerShell session. + +.Description +Pushes the python executable for a virtual environment to the front of the +$Env:PATH environment variable and sets the prompt to signify that you are +in a Python virtual environment. Makes use of the command line switches as +well as the `pyvenv.cfg` file values present in the virtual environment. + +.Parameter VenvDir +Path to the directory that contains the virtual environment to activate. The +default value for this is the parent of the directory that the Activate.ps1 +script is located within. + +.Parameter Prompt +The prompt prefix to display when this virtual environment is activated. By +default, this prompt is the name of the virtual environment folder (VenvDir) +surrounded by parentheses and followed by a single space (ie. '(.venv) '). + +.Example +Activate.ps1 +Activates the Python virtual environment that contains the Activate.ps1 script. + +.Example +Activate.ps1 -Verbose +Activates the Python virtual environment that contains the Activate.ps1 script, +and shows extra information about the activation as it executes. + +.Example +Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv +Activates the Python virtual environment located in the specified location. + +.Example +Activate.ps1 -Prompt "MyPython" +Activates the Python virtual environment that contains the Activate.ps1 script, +and prefixes the current prompt with the specified string (surrounded in +parentheses) while the virtual environment is active. + +.Notes +On Windows, it may be required to enable this Activate.ps1 script by setting the +execution policy for the user. You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. 
+ +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove VIRTUAL_ENV_PROMPT altogether. + if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { + Remove-Item -Path env:VIRTUAL_ENV_PROMPT + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } + $env:VIRTUAL_ENV_PROMPT = $Prompt +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/backend_flask/flask/bin/activate b/backend_flask/flask/bin/activate new file mode 100644 index 0000000..eb0f823 --- /dev/null +++ b/backend_flask/flask/bin/activate @@ -0,0 +1,70 @@ +# This file must be used with "source bin/activate" *from bash* +# You cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # Call hash to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + hash -r 2> /dev/null + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +# on Windows, a path can contain colons and backslashes and has to be converted: +if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then + # transform D:\path\to\venv to /d/path/to/venv on MSYS + # and to /cygdrive/d/path/to/venv on Cygwin + export VIRTUAL_ENV=$(cygpath /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask) +else + # use the path as-is + export VIRTUAL_ENV=/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask +fi + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/"bin":$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + PS1='(flask) '"${PS1:-}" + export PS1 + VIRTUAL_ENV_PROMPT='(flask) ' + export VIRTUAL_ENV_PROMPT +fi + +# Call hash to forget past commands. Without forgetting +# past commands the $PATH changes we made may not be respected +hash -r 2> /dev/null diff --git a/backend_flask/flask/bin/activate.csh b/backend_flask/flask/bin/activate.csh new file mode 100644 index 0000000..dce2ff6 --- /dev/null +++ b/backend_flask/flask/bin/activate.csh @@ -0,0 +1,27 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. 
+ +# Created by Davide Di Blasi . +# Ported to Python 3.3 venv by Andrew Svetlov + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/"bin":$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + set prompt = '(flask) '"$prompt" + setenv VIRTUAL_ENV_PROMPT '(flask) ' +endif + +alias pydoc python -m pydoc + +rehash diff --git a/backend_flask/flask/bin/activate.fish b/backend_flask/flask/bin/activate.fish new file mode 100644 index 0000000..e19fb9f --- /dev/null +++ b/backend_flask/flask/bin/activate.fish @@ -0,0 +1,69 @@ +# This file must be used with "source /bin/activate.fish" *from fish* +# (https://fishshell.com/). You cannot run it directly. + +function deactivate -d "Exit virtual environment and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + set -e _OLD_FISH_PROMPT_OVERRIDE + # prevents error when using nested fish instances (Issue #93858) + if functions -q _old_fish_prompt + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + if test "$argv[1]" != "nondestructive" + # Self-destruct! + functions -e deactivate + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/"bin $PATH + +# Unset PYTHONHOME if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # Save the current fish_prompt function as the function _old_fish_prompt. + functions -c fish_prompt _old_fish_prompt + + # With the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command. + set -l old_status $status + + # Output the venv prompt; color taken from the blue of the Python logo. + printf "%s%s%s" (set_color 4B8BBE) '(flask) ' (set_color normal) + + # Restore the return status of the previous command. + echo "exit $old_status" | . + # Output the original/"old" prompt. 
+ _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" + set -gx VIRTUAL_ENV_PROMPT '(flask) ' +end diff --git a/backend_flask/flask/bin/celery b/backend_flask/flask/bin/celery new file mode 100644 index 0000000..6362f9a --- /dev/null +++ b/backend_flask/flask/bin/celery @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from celery.__main__ import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/dotenv b/backend_flask/flask/bin/dotenv new file mode 100644 index 0000000..8c71b42 --- /dev/null +++ b/backend_flask/flask/bin/dotenv @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from dotenv.__main__ import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli()) diff --git a/backend_flask/flask/bin/f2py b/backend_flask/flask/bin/f2py new file mode 100644 index 0000000..98a859b --- /dev/null +++ b/backend_flask/flask/bin/f2py @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from numpy.f2py.f2py2e import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/flask b/backend_flask/flask/bin/flask new file mode 100644 index 0000000..dec4293 --- /dev/null +++ b/backend_flask/flask/bin/flask @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from flask.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/httpx b/backend_flask/flask/bin/httpx new file mode 100644 index 0000000..49e5d65 --- /dev/null +++ b/backend_flask/flask/bin/httpx @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from httpx import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/jsondiff b/backend_flask/flask/bin/jsondiff new file mode 100644 index 0000000..7a70534 --- /dev/null +++ b/backend_flask/flask/bin/jsondiff @@ -0,0 +1,41 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- + +from __future__ import print_function + +import sys +import json +import jsonpatch +import argparse + + +parser = argparse.ArgumentParser(description='Diff two JSON files') +parser.add_argument('FILE1', type=argparse.FileType('r')) +parser.add_argument('FILE2', type=argparse.FileType('r')) +parser.add_argument('--indent', type=int, default=None, + help='Indent output by n spaces') +parser.add_argument('-u', '--preserve-unicode', action='store_true', + help='Output Unicode character as-is without using Code Point') +parser.add_argument('-v', '--version', action='version', + version='%(prog)s ' + jsonpatch.__version__) + + +def main(): + try: + diff_files() + except KeyboardInterrupt: + sys.exit(1) + + +def diff_files(): + """ Diffs two JSON files and prints a patch """ + args = parser.parse_args() + doc1 = 
json.load(args.FILE1) + doc2 = json.load(args.FILE2) + patch = jsonpatch.make_patch(doc1, doc2) + if patch.patch: + print(json.dumps(patch.patch, indent=args.indent, ensure_ascii=not(args.preserve_unicode))) + sys.exit(1) + +if __name__ == "__main__": + main() diff --git a/backend_flask/flask/bin/jsonpatch b/backend_flask/flask/bin/jsonpatch new file mode 100644 index 0000000..5e5226b --- /dev/null +++ b/backend_flask/flask/bin/jsonpatch @@ -0,0 +1,107 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- + +import sys +import os.path +import json +import jsonpatch +import tempfile +import argparse + + +parser = argparse.ArgumentParser( + description='Apply a JSON patch on a JSON file') +parser.add_argument('ORIGINAL', type=argparse.FileType('r'), + help='Original file') +parser.add_argument('PATCH', type=argparse.FileType('r'), + nargs='?', default=sys.stdin, + help='Patch file (read from stdin if omitted)') +parser.add_argument('--indent', type=int, default=None, + help='Indent output by n spaces') +parser.add_argument('-b', '--backup', action='store_true', + help='Back up ORIGINAL if modifying in-place') +parser.add_argument('-i', '--in-place', action='store_true', + help='Modify ORIGINAL in-place instead of to stdout') +parser.add_argument('-v', '--version', action='version', + version='%(prog)s ' + jsonpatch.__version__) +parser.add_argument('-u', '--preserve-unicode', action='store_true', + help='Output Unicode character as-is without using Code Point') + +def main(): + try: + patch_files() + except KeyboardInterrupt: + sys.exit(1) + + +def patch_files(): + """ Diffs two JSON files and prints a patch """ + args = parser.parse_args() + doc = json.load(args.ORIGINAL) + patch = json.load(args.PATCH) + result = jsonpatch.apply_patch(doc, patch) + + if args.in_place: + dirname = os.path.abspath(os.path.dirname(args.ORIGINAL.name)) + + try: + # Attempt to replace the file atomically. We do this by + # creating a temporary file in the same directory as the + # original file so we can atomically move the new file over + # the original later. (This is done in the same directory + # because atomic renames do not work across mount points.) + + fd, pathname = tempfile.mkstemp(dir=dirname) + fp = os.fdopen(fd, 'w') + atomic = True + + except OSError: + # We failed to create the temporary file for an atomic + # replace, so fall back to non-atomic mode by backing up + # the original (if desired) and writing a new file. + + if args.backup: + os.rename(args.ORIGINAL.name, args.ORIGINAL.name + '.orig') + fp = open(args.ORIGINAL.name, 'w') + atomic = False + + else: + # Since we're not replacing the original file in-place, write + # the modified JSON to stdout instead. + + fp = sys.stdout + + # By this point we have some sort of file object we can write the + # modified JSON to. + + json.dump(result, fp, indent=args.indent, ensure_ascii=not(args.preserve_unicode)) + fp.write('\n') + + if args.in_place: + # Close the new file. If we aren't replacing atomically, this + # is our last step, since everything else is already in place. + + fp.close() + + if atomic: + try: + # Complete the atomic replace by linking the original + # to a backup (if desired), fixing up the permissions + # on the temporary file, and moving it into place. 
+ + if args.backup: + os.link(args.ORIGINAL.name, args.ORIGINAL.name + '.orig') + os.chmod(pathname, os.stat(args.ORIGINAL.name).st_mode) + os.rename(pathname, args.ORIGINAL.name) + + except OSError: + # In the event we could not actually do the atomic + # replace, unlink the original to move it out of the + # way and finally move the temporary file into place. + + os.unlink(args.ORIGINAL.name) + os.rename(pathname, args.ORIGINAL.name) + + +if __name__ == "__main__": + main() diff --git a/backend_flask/flask/bin/jsonpointer b/backend_flask/flask/bin/jsonpointer new file mode 100644 index 0000000..a48e8da --- /dev/null +++ b/backend_flask/flask/bin/jsonpointer @@ -0,0 +1,67 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- + + +import argparse +import json +import sys + +import jsonpointer + +parser = argparse.ArgumentParser( + description='Resolve a JSON pointer on JSON files') + +# Accept pointer as argument or as file +ptr_group = parser.add_mutually_exclusive_group(required=True) + +ptr_group.add_argument('-f', '--pointer-file', type=argparse.FileType('r'), + nargs='?', + help='File containing a JSON pointer expression') + +ptr_group.add_argument('POINTER', type=str, nargs='?', + help='A JSON pointer expression') + +parser.add_argument('FILE', type=argparse.FileType('r'), nargs='+', + help='Files for which the pointer should be resolved') +parser.add_argument('--indent', type=int, default=None, + help='Indent output by n spaces') +parser.add_argument('-v', '--version', action='version', + version='%(prog)s ' + jsonpointer.__version__) + + +def main(): + try: + resolve_files() + except KeyboardInterrupt: + sys.exit(1) + + +def parse_pointer(args): + if args.POINTER: + ptr = args.POINTER + elif args.pointer_file: + ptr = args.pointer_file.read().strip() + else: + parser.print_usage() + sys.exit(1) + + return ptr + + +def resolve_files(): + """ Resolve a JSON pointer on JSON files """ + args = parser.parse_args() + + ptr = parse_pointer(args) + + for f in args.FILE: + doc = json.load(f) + try: + result = jsonpointer.resolve_pointer(doc, ptr) + print(json.dumps(result, indent=args.indent)) + except jsonpointer.JsonPointerException as e: + print('Could not resolve pointer: %s' % str(e), file=sys.stderr) + + +if __name__ == "__main__": + main() diff --git a/backend_flask/flask/bin/langchain-server b/backend_flask/flask/bin/langchain-server new file mode 100644 index 0000000..89f380b --- /dev/null +++ b/backend_flask/flask/bin/langchain-server @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from langchain.server import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/normalizer b/backend_flask/flask/bin/normalizer new file mode 100644 index 0000000..80cd505 --- /dev/null +++ b/backend_flask/flask/bin/normalizer @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli.cli_detect()) diff --git a/backend_flask/flask/bin/pip b/backend_flask/flask/bin/pip new file mode 100644 index 0000000..75aea4f --- /dev/null +++ b/backend_flask/flask/bin/pip @@ -0,0 +1,8 @@ 
+#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/pip3 b/backend_flask/flask/bin/pip3 new file mode 100644 index 0000000..75aea4f --- /dev/null +++ b/backend_flask/flask/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/pip3.12 b/backend_flask/flask/bin/pip3.12 new file mode 100644 index 0000000..75aea4f --- /dev/null +++ b/backend_flask/flask/bin/pip3.12 @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/bin/py.test b/backend_flask/flask/bin/py.test new file mode 100644 index 0000000..3d25aa1 --- /dev/null +++ b/backend_flask/flask/bin/py.test @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pytest import console_main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(console_main()) diff --git a/backend_flask/flask/bin/pyrsa-decrypt b/backend_flask/flask/bin/pyrsa-decrypt new file mode 100644 index 0000000..9fe33e0 --- /dev/null +++ b/backend_flask/flask/bin/pyrsa-decrypt @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import decrypt +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(decrypt()) diff --git a/backend_flask/flask/bin/pyrsa-encrypt b/backend_flask/flask/bin/pyrsa-encrypt new file mode 100644 index 0000000..ffbe400 --- /dev/null +++ b/backend_flask/flask/bin/pyrsa-encrypt @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import encrypt +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(encrypt()) diff --git a/backend_flask/flask/bin/pyrsa-keygen b/backend_flask/flask/bin/pyrsa-keygen new file mode 100644 index 0000000..0f4c476 --- /dev/null +++ b/backend_flask/flask/bin/pyrsa-keygen @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import keygen +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(keygen()) diff --git a/backend_flask/flask/bin/pyrsa-priv2pub b/backend_flask/flask/bin/pyrsa-priv2pub new file mode 100644 index 0000000..cd3e0a1 --- /dev/null +++ b/backend_flask/flask/bin/pyrsa-priv2pub @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.util import private_to_public +if __name__ == 
'__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(private_to_public()) diff --git a/backend_flask/flask/bin/pyrsa-sign b/backend_flask/flask/bin/pyrsa-sign new file mode 100644 index 0000000..92a2290 --- /dev/null +++ b/backend_flask/flask/bin/pyrsa-sign @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import sign +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(sign()) diff --git a/backend_flask/flask/bin/pyrsa-verify b/backend_flask/flask/bin/pyrsa-verify new file mode 100644 index 0000000..f11d949 --- /dev/null +++ b/backend_flask/flask/bin/pyrsa-verify @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from rsa.cli import verify +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(verify()) diff --git a/backend_flask/flask/bin/pytest b/backend_flask/flask/bin/pytest new file mode 100644 index 0000000..3d25aa1 --- /dev/null +++ b/backend_flask/flask/bin/pytest @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pytest import console_main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(console_main()) diff --git a/backend_flask/flask/bin/python b/backend_flask/flask/bin/python new file mode 100644 index 0000000..b8a0adb --- /dev/null +++ b/backend_flask/flask/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/backend_flask/flask/bin/python3 b/backend_flask/flask/bin/python3 new file mode 100644 index 0000000..ae65fda --- /dev/null +++ b/backend_flask/flask/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/backend_flask/flask/bin/python3.12 b/backend_flask/flask/bin/python3.12 new file mode 100644 index 0000000..b8a0adb --- /dev/null +++ b/backend_flask/flask/bin/python3.12 @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/backend_flask/flask/bin/tqdm b/backend_flask/flask/bin/tqdm new file mode 100644 index 0000000..2a536ba --- /dev/null +++ b/backend_flask/flask/bin/tqdm @@ -0,0 +1,8 @@ +#!/home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from tqdm.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend_flask/flask/include/site/python3.12/greenlet/greenlet.h b/backend_flask/flask/include/site/python3.12/greenlet/greenlet.h new file mode 100644 index 0000000..d02a16e --- /dev/null +++ b/backend_flask/flask/include/site/python3.12/greenlet/greenlet.h @@ -0,0 +1,164 @@ +/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ + +/* Greenlet object interface */ + +#ifndef Py_GREENLETOBJECT_H +#define Py_GREENLETOBJECT_H + + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/* This is deprecated and undocumented. It does not change. 
*/ +#define GREENLET_VERSION "1.0.0" + +#ifndef GREENLET_MODULE +#define implementation_ptr_t void* +#endif + +typedef struct _greenlet { + PyObject_HEAD + PyObject* weakreflist; + PyObject* dict; + implementation_ptr_t pimpl; +} PyGreenlet; + +#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) + + +/* C API functions */ + +/* Total number of symbols that are exported */ +#define PyGreenlet_API_pointers 12 + +#define PyGreenlet_Type_NUM 0 +#define PyExc_GreenletError_NUM 1 +#define PyExc_GreenletExit_NUM 2 + +#define PyGreenlet_New_NUM 3 +#define PyGreenlet_GetCurrent_NUM 4 +#define PyGreenlet_Throw_NUM 5 +#define PyGreenlet_Switch_NUM 6 +#define PyGreenlet_SetParent_NUM 7 + +#define PyGreenlet_MAIN_NUM 8 +#define PyGreenlet_STARTED_NUM 9 +#define PyGreenlet_ACTIVE_NUM 10 +#define PyGreenlet_GET_PARENT_NUM 11 + +#ifndef GREENLET_MODULE +/* This section is used by modules that uses the greenlet C API */ +static void** _PyGreenlet_API = NULL; + +# define PyGreenlet_Type \ + (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) + +# define PyExc_GreenletError \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) + +# define PyExc_GreenletExit \ + ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) + +/* + * PyGreenlet_New(PyObject *args) + * + * greenlet.greenlet(run, parent=None) + */ +# define PyGreenlet_New \ + (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ + _PyGreenlet_API[PyGreenlet_New_NUM]) + +/* + * PyGreenlet_GetCurrent(void) + * + * greenlet.getcurrent() + */ +# define PyGreenlet_GetCurrent \ + (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) + +/* + * PyGreenlet_Throw( + * PyGreenlet *greenlet, + * PyObject *typ, + * PyObject *val, + * PyObject *tb) + * + * g.throw(...) + */ +# define PyGreenlet_Throw \ + (*(PyObject * (*)(PyGreenlet * self, \ + PyObject * typ, \ + PyObject * val, \ + PyObject * tb)) \ + _PyGreenlet_API[PyGreenlet_Throw_NUM]) + +/* + * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) + * + * g.switch(*args, **kwargs) + */ +# define PyGreenlet_Switch \ + (*(PyObject * \ + (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ + _PyGreenlet_API[PyGreenlet_Switch_NUM]) + +/* + * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) + * + * g.parent = new_parent + */ +# define PyGreenlet_SetParent \ + (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ + _PyGreenlet_API[PyGreenlet_SetParent_NUM]) + +/* + * PyGreenlet_GetParent(PyObject* greenlet) + * + * return greenlet.parent; + * + * This could return NULL even if there is no exception active. + * If it does not return NULL, you are responsible for decrementing the + * reference count. + */ +# define PyGreenlet_GetParent \ + (*(PyGreenlet* (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) + +/* + * deprecated, undocumented alias. + */ +# define PyGreenlet_GET_PARENT PyGreenlet_GetParent + +# define PyGreenlet_MAIN \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_MAIN_NUM]) + +# define PyGreenlet_STARTED \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_STARTED_NUM]) + +# define PyGreenlet_ACTIVE \ + (*(int (*)(PyGreenlet*)) \ + _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) + + + + +/* Macro that imports greenlet and initializes C API */ +/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we + keep the older definition to be sure older code that might have a copy of + the header still works. 
*/ +# define PyGreenlet_Import() \ + { \ + _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ + } + +#endif /* GREENLET_MODULE */ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GREENLETOBJECT_H */ diff --git a/backend_flask/flask/lib64 b/backend_flask/flask/lib64 new file mode 100644 index 0000000..7951405 --- /dev/null +++ b/backend_flask/flask/lib64 @@ -0,0 +1 @@ +lib \ No newline at end of file diff --git a/backend_flask/flask/pyvenv.cfg b/backend_flask/flask/pyvenv.cfg new file mode 100644 index 0000000..58a8765 --- /dev/null +++ b/backend_flask/flask/pyvenv.cfg @@ -0,0 +1,5 @@ +home = /usr/bin +include-system-site-packages = false +version = 3.12.3 +executable = /usr/bin/python3.12 +command = /usr/bin/python3 -m venv /home/gellar/Desktop/program/project/SurfSmart/backend_flask/flask diff --git a/backend_flask/myapp/__init__.py b/backend_flask/myapp/__init__.py new file mode 100644 index 0000000..c76e89f --- /dev/null +++ b/backend_flask/myapp/__init__.py @@ -0,0 +1,123 @@ +# backend/myapp/__init__.py + +import sys +import os +from flask import Flask, jsonify + +# --- 1. Import Extensions --- +# Import specific extension instances defined in extensions.py +# Avoid 'import *' +try: + from .extensions import mongo, cors # Add other extensions like jwt, ma if used +except ImportError as e: + print(f"Error importing extensions: {e}. Make sure extensions.py exists and defines instances.") + # Provide dummy instances or raise an error if extensions are critical + mongo = None + cors = None + +# --- 2. Import Default Config --- +# Assumes config.py is in the parent 'backend' directory. Adjust if moved. +try: + # This relative import works if 'backend' is treated as a package or is in sys.path + from .config import Config, config as config_options # Assuming config.py has a 'config' dict for selection +except ImportError: + print("Warning: Could not import default config from parent directory.") + # Define a minimal fallback Config class + class Config: + SECRET_KEY = os.environ.get('SECRET_KEY') or 'a-default-fallback-secret-key' + DEBUG = False + config_options = {'default': Config} + + +def create_app(config_name='default') -> Flask: + """ + Creates and configures the Flask application instance. + Uses the Application Factory pattern. 
+ """ + # === Step 1: Create Flask App === + # Enable loading from the instance/ folder relative to the 'backend' directory + app = Flask(__name__, instance_relative_config=True) + + # === Step 2: Load Configuration === + # Load default config based on config_name (if using different configs) + selected_config = config_options.get(config_name, Config) + app.config.from_object(selected_config) + + # Load instance config (/instance/config.py) - Overrides defaults + # silent=True prevents errors if the file doesn't exist + app.config.from_pyfile('config.py', silent=True) + + # === Step 3: Initialize Extensions === + if mongo: + try: + mongo.init_app(app) + print("PyMongo initialized successfully.") + except Exception as e: + print(f"Error initializing PyMongo: {e}") + + if cors: + try: + # Configure CORS using settings from app.config + frontend_origin = "http://localhost:5173" + cors.init_app(app, resources={r"/api/*": {"origins": app.config.get('FRONTEND_ORIGIN', '*')}}, supports_credentials=True) + print("CORS initialized successfully.") + except Exception as e: + print(f"Error initializing CORS: {e}") + + # if jwt: + # try: + # jwt.init_app(app) + # print("JWTManager initialized successfully.") + # except Exception as e: + # print(f"Error initializing JWTManager: {e}") + # Add init_app calls for other extensions (ma, migrate, etc.) here + + # === Step 4: Register Blueprints === + # Use unique variable names and appropriate prefixes + try: + # Assuming each blueprint's __init__.py defines an object named 'bp' + from .auth import bp as auth_bp # checked + from .ai_services import bp as ai_services_bp + from .activity import bp as activity_bp + from .dialog import bp as dialog_bp + from .projects import bp as projects_bp # checked + from .urls import bp as urls_bp + + # Register with potentially more specific prefixes + app.register_blueprint(auth_bp, url_prefix='/api/auth') + app.register_blueprint(ai_services_bp, url_prefix="/api/ai") # Changed prefix + app.register_blueprint(activity_bp, url_prefix='/api/activity') + app.register_blueprint(projects_bp, url_prefix='/api/projects') + app.register_blueprint(dialog_bp, url_prefix="/api/dialog") + app.register_blueprint(urls_bp, url_prefix="/api/urls") + print("Blueprints registered successfully.") + + except (ModuleNotFoundError, ImportError) as e: + print(f"Error importing or registering blueprints: {e}. Check blueprint structure and 'bp' variable names.") + except Exception as e: + print(f"An unexpected error occurred during blueprint registration: {e}") + + + # === Step 5: Add Root Route (Optional) === + @app.route("/") + def index(): + # You could add a check here to see if mongo connection is working + db_status = "disconnected" + if mongo: + try: + # The ismaster command is cheap and does not require auth. 
+ mongo.cx.admin.command('ismaster') + db_status = "connected" + except Exception: + db_status = "connection error" + return jsonify({"message": "Backend service is running!", "database_status": db_status}) + + # You can also add other app-wide error handlers here if needed + @app.errorhandler(404) + def page_not_found(e): + return jsonify(error=str(e)), 404 + + print(f"App created with config: {config_name}") + print(f"Instance path: {app.instance_path}") # Check instance path + + return app diff --git a/backend_flask/myapp/activity/__init__.py b/backend_flask/myapp/activity/__init__.py new file mode 100644 index 0000000..9f57b05 --- /dev/null +++ b/backend_flask/myapp/activity/__init__.py @@ -0,0 +1,11 @@ +# myapp/activity/__init__.py + +from flask import Blueprint + +# Define the Blueprint instance for the project activity module. +bp = Blueprint('activity', __name__, url_prefix='/api/activity') + +# Import the routes module for this blueprint. +# This assumes your routes are defined in 'activity_routes.py'. +# The import MUST come AFTER 'bp' is defined. +from . import activity_routes diff --git a/backend_flask/myapp/activity/activity_routes.py b/backend_flask/myapp/activity/activity_routes.py new file mode 100644 index 0000000..a48561f --- /dev/null +++ b/backend_flask/myapp/activity/activity_routes.py @@ -0,0 +1,297 @@ +# myapp/activity/activity_routes.py + +import datetime +import logging +from flask import request, jsonify, current_app, has_app_context # Flask utilities +from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds +from functools import wraps # Import wraps for dummy decorator + +# --- Local Blueprint Import --- +from . import bp # Import the 'bp' instance defined in the local __init__.py + +# --- Shared Extensions and Utilities Imports --- +try: + from ..extensions import mongo # Import the initialized PyMongo instance + from ..utils import token_required # Import the authentication decorator +except ImportError: + # Fallback or error handling if imports fail + print("Warning: Could not import mongo or token_required in activity/activity_routes.py.") + mongo = None + # Define a dummy decorator if token_required is missing + def token_required(f): + @wraps(f) # Use wraps for better introspection + def wrapper(*args, **kwargs): + print("ERROR: token_required decorator is not available!") + return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500 + return wrapper + +# --- Schema Imports --- +try: + # Import the relevant schemas defined in schemas.py + from ..schemas import ActivityCreateSchema, ActivitySchema + from marshmallow import ValidationError +except ImportError: + print("Warning: Could not import Activity schemas or ValidationError in activity/activity_routes.py.") + ActivityCreateSchema = None + ActivitySchema = None + ValidationError = None # Define ValidationError as None if import fails + +# --- Helper to get logger safely --- +def _get_logger(): + if has_app_context(): + return current_app.logger + return logging.getLogger(__name__) + +# Note: Routes use paths relative to the '/api/activity' prefix defined in __init__.py. + +@bp.route('/', methods=['POST']) # Path relative to blueprint prefix +@token_required +def create_activity(current_user): + """ + Create a new project activity log entry. + Uses ActivityCreateSchema for input validation. + Expects 'projectId', 'activityType', and optional 'message' in JSON payload. + Verifies user has access to the project. 
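Editorial note: the route modules below all lean on token_required from myapp/utils.py, which is part of this patch but outside this hunk. It is not reproduced here; the sketch below only shows the shape such a decorator commonly takes (JWT read from the Authorization header, user looked up and injected as current_user) and should not be read as the repository's actual implementation:

    from functools import wraps
    import jwt
    from bson.objectid import ObjectId
    from flask import current_app, jsonify, request
    from myapp.extensions import mongo   # assumed import path for the sketch

    def token_required(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            auth = request.headers.get('Authorization', '')
            token = auth.split(' ', 1)[1] if auth.startswith('Bearer ') else None
            if not token:
                return jsonify({"message": "Token is missing."}), 401
            try:
                payload = jwt.decode(
                    token,
                    current_app.config['SECRET_KEY'],
                    algorithms=[current_app.config.get('JWT_ALGORITHM', 'HS256')],
                )
                current_user = mongo.db.users.find_one({"_id": ObjectId(payload['user_id'])})
            except jwt.ExpiredSignatureError:
                return jsonify({"message": "Token has expired."}), 401
            except Exception:
                return jsonify({"message": "Token is invalid."}), 401
            if not current_user:
                return jsonify({"message": "User not found."}), 401
            return f(current_user, *args, **kwargs)
        return wrapper
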
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + logger.error("Invalid current_user object received in create_activity") + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in create_activity: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ActivityCreateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = ActivityCreateSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Create activity validation failed: {err.messages}") + # Return validation errors from Marshmallow + return jsonify(err.messages), 422 # 422 Unprocessable Entity is appropriate + + # Extract validated data + project_id_str = validated_data['projectId'] # Already validated as ObjectId string by schema if validator is used + activity_type = validated_data['activityType'] + message = validated_data.get('message', "") # Get optional message + + try: + # Convert projectId string to ObjectId (schema validator should ensure format) + try: + project_obj_id = ObjectId(project_id_str) + except InvalidId: + # This should ideally be caught by schema validation if using _validate_object_id + logger.error(f"Schema validation passed but ObjectId conversion failed for: {project_id_str}") + return jsonify({"message": "Invalid projectId format despite schema validation."}), 400 + + # --- Verify Project Access --- + db = mongo.db + project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1}) + if not project: + return jsonify({"message": "Project not found."}), 404 # 404 Not Found + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if owner_id != user_id and user_id not in collaborators: + # 403 Forbidden - authenticated but not authorized for this project + return jsonify({"message": "You do not have access to this project."}), 403 + + # --- Prepare and Insert Activity Log --- + now = datetime.datetime.now(datetime.timezone.utc) # Use timezone-aware UTC time + doc = { + "projectId": project_obj_id, + "userId": user_id, # Store the user who performed the activity + "activityType": activity_type, + "message": message, + "createdAt": now + # No updatedAt for activity logs usually + } + result = db.project_activity.insert_one(doc) + + # Return success response with the ID of the new log entry + return jsonify({ + "message": "Activity log created successfully.", + "activity_id": str(result.inserted_id) # Convert ObjectId to string + }), 201 # 201 Created status code + + except KeyError: # Should be caught by token_required or initial check + logger.error(f"User ID (_id) not found in token payload for create_activity.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error creating activity for user {current_user.get('_id', 'UNKNOWN')}: {e}", 
exc_info=True) + return jsonify({"message": "An error occurred while creating the activity log."}), 500 + + +@bp.route('/', methods=['GET']) # Path relative to blueprint prefix +@token_required +def list_activity_logs(current_user): + """ + List activity logs for a specific project. + Uses ActivitySchema for output serialization. + Requires 'projectId' as a query parameter. + Supports 'limit' and 'offset' for pagination. + Verifies user has access to the project. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in list_activity_logs: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ActivitySchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Get query parameters + project_id_str = request.args.get("projectId", "") + limit_str = request.args.get("limit", "20") # Default limit 20 + offset_str = request.args.get("offset", "0") # Default offset 0 + + # Validate and parse pagination parameters + try: + limit = max(int(limit_str), 1) # Ensure limit is at least 1 + except ValueError: + limit = 20 # Default on parsing error + try: + offset = max(int(offset_str), 0) # Ensure offset is non-negative + except ValueError: + offset = 0 # Default on parsing error + + # Project ID is required for listing logs + if not project_id_str: + return jsonify({"message": "Query parameter 'projectId' is required to list logs."}), 400 + + # Convert projectId string to ObjectId + try: + project_obj_id = ObjectId(project_id_str) + except InvalidId: + return jsonify({"message": "Invalid projectId format in query parameter."}), 400 + + # --- Verify Project Access --- + db = mongo.db + project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1}) + if not project: + return jsonify({"message": "Project not found."}), 404 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to this project's activity logs."}), 403 + + # --- Fetch Activity Logs --- + cursor = db.project_activity.find( + {"projectId": project_obj_id} + ).sort("createdAt", -1).skip(offset).limit(limit) # Sort newest first + + # Convert cursor to list for serialization + activity_docs = list(cursor) + + # --- Serialize results using the schema --- + # Instantiate schema for multiple documents + output_schema = ActivitySchema(many=True) + # Use dump() to serialize the list of documents + # Schema handles ObjectId and datetime conversion + serialized_result = output_schema.dump(activity_docs) + + # Return the serialized list of activity logs + return jsonify({"activity_logs": serialized_result}), 200 + + except KeyError: # Should be caught by token_required or initial check + logger.error(f"User ID (_id) not found in token payload for list_activity_logs.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except 
Exception as e: + logger.error(f"Error listing activity logs for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while listing activity logs."}), 500 + + +@bp.route('/', methods=['DELETE']) # Path relative to blueprint prefix +@token_required +def delete_activity_log(current_user, activity_id): + """ + Delete a specific activity log entry by its ID. + Requires the authenticated user to be the owner of the associated project. + (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in delete_activity_log: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate activity_id format + try: + obj_activity_id = ObjectId(activity_id) + except InvalidId: + return jsonify({"message": "Invalid activity log ID format."}), 400 + + db = mongo.db + # --- Find Log and Verify Ownership via Project --- + # Fetch projectId to check ownership + activity_doc = db.project_activity.find_one({"_id": obj_activity_id}, {"projectId": 1}) + if not activity_doc: + return jsonify({"message": "Activity log not found."}), 404 + + project_id = activity_doc.get("projectId") + if not project_id or not isinstance(project_id, ObjectId): + logger.error(f"Activity log {activity_id} is missing valid projectId.") + return jsonify({"message": "Cannot verify ownership due to missing project reference."}), 500 + + project = db.projects.find_one({"_id": project_id}, {"ownerId": 1}) + if not project: + logger.warning(f"Project {project_id} associated with activity log {activity_id} not found.") + # Even if project is gone, maybe allow deleting orphan log? Or deny? Deny for safety. 
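Editorial note: the listing query above filters project_activity by projectId and sorts on createdAt, so a compound index keeps the skip/limit pagination cheap if one is not already in place. Index creation is not part of this patch; a one-off sketch, with connection details as placeholders:

    from pymongo import ASCENDING, DESCENDING, MongoClient

    client = MongoClient("mongodb://localhost:27017")   # placeholder URI
    db = client["surfsmart"]                            # database name taken from MONGO_URI

    # Supports find({"projectId": ...}).sort("createdAt", -1)
    db.project_activity.create_index(
        [("projectId", ASCENDING), ("createdAt", DESCENDING)],
        name="projectId_createdAt",
    )
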
+ return jsonify({"message": "Associated project not found."}), 404 + + # Verify ownership (only project owner can delete logs in this implementation) + owner_id = project.get("ownerId") + if owner_id != user_id: + return jsonify({"message": "You do not have permission to delete this activity log (must be project owner)."}), 403 + + # --- Perform Deletion --- + result = db.project_activity.delete_one({"_id": obj_activity_id}) + + # --- Return Response --- + if result.deleted_count == 1: + return jsonify({"message": "Activity log deleted successfully."}), 200 + else: + # Log was found but delete failed + logger.warning(f"Activity log {activity_id} found but delete_one removed 0 documents.") + return jsonify({"message": "Failed to delete activity log (already deleted?)."}), 404 + + except KeyError: # Should be caught by token_required or initial check + logger.error(f"User ID (_id) not found in token payload for delete_activity_log.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error deleting activity log {activity_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while deleting the activity log."}), 500 + diff --git a/backend_flask/myapp/ai_services/__init__.py b/backend_flask/myapp/ai_services/__init__.py new file mode 100644 index 0000000..8b5944d --- /dev/null +++ b/backend_flask/myapp/ai_services/__init__.py @@ -0,0 +1,13 @@ +# myapp/api_keys/__init__.py + +from flask import Blueprint + +# Define the Blueprint instance for the API key management module. +# 'api_keys' is the unique name for this blueprint. +# url_prefix='/api/keys' will be prepended to all routes defined in this blueprint. +bp = Blueprint('api_keys', __name__, url_prefix='/api/keys') + +# Import the routes module. +# This connects the routes defined in routes.py to the 'bp' instance. +# This import MUST come AFTER the Blueprint 'bp' is defined. +from . import ai_routes diff --git a/backend_flask/myapp/ai_services/ai_routes.py b/backend_flask/myapp/ai_services/ai_routes.py new file mode 100644 index 0000000..e2b64cc --- /dev/null +++ b/backend_flask/myapp/ai_services/ai_routes.py @@ -0,0 +1,337 @@ +# myapp/ai_services/ai_routes.py +# This file handles API Key management logic. + +import datetime +import logging +from flask import request, jsonify, current_app, has_app_context # Flask utilities +from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds +from functools import wraps # Import wraps for dummy decorator + +# --- Local Blueprint Import --- +from . 
import bp # Import the 'bp' instance defined in the local __init__.py + +# --- Shared Extensions and Utilities Imports --- +try: + from ..extensions import mongo # Import the initialized PyMongo instance + from ..utils import token_required # Import the authentication decorator +except ImportError: + # Fallback or error handling if imports fail + print("Warning: Could not import mongo or token_required in ai_services/ai_routes.py.") + mongo = None + # Define a dummy decorator if token_required is missing + def token_required(f): + @wraps(f) + def wrapper(*args, **kwargs): + print("ERROR: token_required decorator is not available!") + return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500 + return wrapper + +# --- Schema Imports --- +try: + # Import the relevant schemas defined in schemas.py + from ..schemas import APIKeyCreateSchema, APIKeyUpdateSchema, APIKeySchema + from marshmallow import ValidationError +except ImportError: + print("Warning: Could not import APIKey schemas or ValidationError in ai_services/ai_routes.py.") + APIKeyCreateSchema = None + APIKeyUpdateSchema = None + APIKeySchema = None + ValidationError = None # Define ValidationError as None if import fails + +# --- Helper to get logger safely --- +def _get_logger(): + if has_app_context(): + return current_app.logger + return logging.getLogger(__name__) + +# Note: Routes use paths relative to the '/api/ai' prefix. +# Original '/api_list' becomes '/keys' +# Original '/api_list/' becomes '/keys/' + +@bp.route('/keys', methods=['GET']) # Path relative to blueprint prefix +@token_required +def list_api_keys(current_user): + """ + List all API keys belonging to the authenticated user. + Uses APIKeySchema for output serialization. + Fetches keys from the 'api_list' collection associated with the user's ID. + Sorts by update time descending. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in list_api_keys: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not APIKeySchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Find all documents in the 'api_list' collection for this user + db = mongo.db + cursor = db.api_list.find({"uid": user_id}).sort("updatedAt", -1) + api_key_docs = list(cursor) # Convert cursor to list + + # --- Serialize results using the schema --- + output_schema = APIKeySchema(many=True) + # Schema handles ObjectId and datetime conversion, and field selection/exclusion + # NOTE: APIKeySchema currently dumps the full key. Consider masking in schema if needed. 
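Editorial note: the NOTE above flags that APIKeySchema returns the raw key when listing. If masking is wanted, one option (a sketch against Marshmallow, not the schema shipped in schemas.py) is to expose only a masked representation on dump:

    from marshmallow import Schema, fields

    class MaskedAPIKeySchema(Schema):
        id = fields.String(attribute="_id")
        name = fields.String()
        selected = fields.Boolean()
        key = fields.Method("mask_key")   # replaces the raw value when dumping

        def mask_key(self, obj):
            raw = obj.get("key", "")
            # Keep only the last four characters visible, e.g. "************3f9a".
            return ("*" * max(len(raw) - 4, 0)) + raw[-4:] if raw else ""
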
+ serialized_result = output_schema.dump(api_key_docs) + + return jsonify({"api_keys": serialized_result}), 200 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for list_api_keys.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error listing API keys for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while listing API keys."}), 500 + + +@bp.route('/keys', methods=['POST']) # Path relative to blueprint prefix +@token_required +def create_api_key(current_user): + """ + Create a new API key entry for the authenticated user. + Uses APIKeyCreateSchema for input validation. + Expects 'name', 'key', and optional 'selected' in JSON payload. + Prevents duplicate names per user. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in create_api_key: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not APIKeyCreateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = APIKeyCreateSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Create API key validation failed: {err.messages}") + return jsonify(err.messages), 422 # Return validation errors + + # Extract validated data + name = validated_data['name'] + api_key = validated_data['key'] + selected = validated_data['selected'] # Schema provides default if missing + + try: + # Check if an API key with the same name already exists for this user + db = mongo.db + existing = db.api_list.find_one({"uid": user_id, "name": name}) + if existing: + # Return 409 Conflict status code for duplicates + return jsonify({"message": f"User already has an API key for {name}."}), 409 + + # --- Prepare and Insert Document --- + now = datetime.datetime.now(datetime.timezone.utc) # Use timezone-aware UTC time + doc = { + "uid": user_id, # Store user's ObjectId + "name": name, + "key": api_key, # Store the provided key + "selected": selected, # Use validated boolean + "createdAt": now, + "updatedAt": now + } + result = db.api_list.insert_one(doc) + + # Return success response with the ID of the newly created key + return jsonify({ + "message": "API key created successfully.", + "api_id": str(result.inserted_id) # Convert ObjectId to string + }), 201 # 201 Created status code + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for create_api_key.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + 
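Editorial note: a quick way to exercise the list/create endpoints above from a script. The base URL, port, and token are placeholders, and the Authorization header shape is an assumption about utils.token_required; the blueprint is mounted at /api/ai in create_app (overriding its local /api/keys prefix), so the effective path is assumed to be /api/ai/keys:

    import requests

    BASE = "http://localhost:5000"                 # placeholder host/port
    headers = {"Authorization": "Bearer <token>"}  # header shape assumed, token from /api/auth/login

    payload = {"name": "gemini", "key": "sk-example", "selected": True}
    first = requests.post(f"{BASE}/api/ai/keys", json=payload, headers=headers)
    print(first.status_code)    # 201 on success

    again = requests.post(f"{BASE}/api/ai/keys", json=payload, headers=headers)
    print(again.status_code)    # 409: duplicate key name for this user

    listing = requests.get(f"{BASE}/api/ai/keys", headers=headers)
    print(listing.json().get("api_keys", []))
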
except Exception as e: + logger.error(f"Error creating API key for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while creating API key."}), 500 + + +@bp.route('/keys/', methods=['PUT']) # Path relative to blueprint prefix +@token_required +def update_api_key(current_user, api_id): + """ + Update an existing API key identified by its ID. + Uses APIKeyUpdateSchema for input validation. + Allows updating 'name', 'key', and 'selected' fields. + Verifies ownership before updating. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in update_api_key: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not APIKeyUpdateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + # Note: Update schema should not have required fields, so load won't fail if empty, + # but we check if validated_data is empty later. + schema = APIKeyUpdateSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Update API key validation failed: {err.messages}") + return jsonify(err.messages), 422 + + # If validation passed but no valid fields were provided + if not validated_data: + return jsonify({"message": "No valid fields provided for update."}), 400 + + try: + # Validate api_id format from URL path + try: + object_id = ObjectId(api_id) + except InvalidId: + return jsonify({"message": "Invalid API key ID format."}), 400 + + # Find the document to update + db = mongo.db + doc = db.api_list.find_one({"_id": object_id}) + if not doc: + return jsonify({"message": "API key not found."}), 404 # 404 Not Found + + # Verify that the authenticated user owns this API key + doc_uid = doc.get("uid") + if not doc_uid or doc_uid != user_id: + # 403 Forbidden - user is authenticated but not authorized for this resource + return jsonify({"message": "You do not have permission to update this API key."}), 403 + + # --- Prepare Update Fields based on validated data --- + update_fields = {} + if "name" in validated_data: + new_name = validated_data["name"] + # Check for name conflict only if name is actually changing + if new_name != doc.get("name") and db.api_list.find_one({"uid": user_id, "name": new_name, "_id": {"$ne": object_id}}): + return jsonify({"message": f"User already has another API key named {new_name}."}), 409 # Conflict + update_fields["name"] = new_name + + if "key" in validated_data: + update_fields["key"] = validated_data["key"] + + if "selected" in validated_data: + update_fields["selected"] = validated_data["selected"] # Already boolean from schema + + # If, after validation and processing, there's nothing to update (e.g., only invalid fields were sent) + if not update_fields: + return jsonify({"message": "No valid changes detected in the provided data."}), 400 + + # Always update the 'updatedAt' timestamp + update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc) + + # Perform the update operation in the database + result = 
db.api_list.update_one( + {"_id": object_id}, # Filter by ID + {"$set": update_fields} # Set the new values + ) + + # Check if the document was found and potentially modified + if result.matched_count == 1: + return jsonify({"message": "API key updated successfully."}), 200 + else: + # This case should ideally not happen if find_one succeeded, but included for safety + logger.warning(f"Update matched count was {result.matched_count} for api_id {api_id}") + return jsonify({"message": "API key update failed (key not found after initial check)."}), 404 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for update_api_key.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error updating API key {api_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while updating the API key."}), 500 + + +@bp.route('/keys/', methods=['DELETE']) # Path relative to blueprint prefix +@token_required +def delete_api_key(current_user, api_id): + """ + Delete an API key identified by its ID. + Verifies ownership before deleting. + (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in delete_api_key: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate api_id format from URL path + try: + object_id = ObjectId(api_id) + except InvalidId: + return jsonify({"message": "Invalid API key ID format."}), 400 + + # Find the document to delete + db = mongo.db + doc = db.api_list.find_one({"_id": object_id}, {"uid": 1}) # Fetch only uid for check + if not doc: + return jsonify({"message": "API key not found."}), 404 # 404 Not Found + + # Verify that the authenticated user owns this API key + doc_uid = doc.get("uid") + if not doc_uid or doc_uid != user_id: + # 403 Forbidden + return jsonify({"message": "You do not have permission to delete this API key."}), 403 + + # Perform the delete operation + result = db.api_list.delete_one({"_id": object_id}) + + # Check if the deletion was successful + if result.deleted_count == 1: + return jsonify({"message": "API key deleted successfully."}), 200 # 200 OK or 204 No Content are suitable + else: + # This case means the document existed initially but couldn't be deleted + logger.error(f"Failed to delete API key {api_id} despite finding it initially.") + return jsonify({"message": "Failed to delete API key (already deleted?)."}), 404 # Or 500 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for delete_api_key.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + 
logger.error(f"Error deleting API key {api_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while deleting the API key."}), 500 diff --git a/backend_flask/myapp/auth/__init__.py b/backend_flask/myapp/auth/__init__.py new file mode 100644 index 0000000..cf63c49 --- /dev/null +++ b/backend_flask/myapp/auth/__init__.py @@ -0,0 +1,13 @@ +# myapp/auth/__init__.py + +from flask import Blueprint + +# Define the Blueprint instance for the authentication module. +# 'auth' is the unique name for this blueprint. +# url_prefix='/api/auth' will be prepended to all routes defined in this blueprint. +bp = Blueprint('auth', __name__, url_prefix='/api/auth') + +# Import the routes module. +# This connects the routes defined in routes.py to the 'bp' instance. +# This import MUST come AFTER the Blueprint 'bp' is defined to avoid circular imports. +from . import auth_routes diff --git a/backend_flask/myapp/auth/auth_routes.py b/backend_flask/myapp/auth/auth_routes.py new file mode 100644 index 0000000..acbb8ff --- /dev/null +++ b/backend_flask/myapp/auth/auth_routes.py @@ -0,0 +1,444 @@ +# myapp/auth/auth_routes.py + +import datetime +import jwt # For encoding JWT tokens +import logging +from flask import request, jsonify, current_app, has_app_context # Flask utilities +from werkzeug.security import generate_password_hash, check_password_hash # For hashing and checking passwords +from bson.objectid import ObjectId, InvalidId # For converting string IDs to MongoDB ObjectId +from functools import wraps # Import wraps for dummy decorator + +# --- Local Blueprint Import (Moved to Top) --- +# Import the 'bp' instance defined in the local __init__.py FIRST +# This often helps resolve circular import issues involving blueprints and utilities/models. +from . import bp + + +# --- Shared Utilities Import --- +# Import the token_required decorator from the utils module +try: + # Assumes utils.py is in the parent 'myapp' package + from ..utils import token_required +except ImportError as e: + # Fallback or error handling if the decorator isn't found + print("Warning: token_required decorator not found in auth/auth_routes.py. Protected routes will fail.") + print(e) + # Define a dummy decorator to prevent NameError, but it won't protect routes + def token_required(f): + @wraps(f) + def wrapper(*args, **kwargs): + print("ERROR: token_required decorator is not available!") + return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500 + return wrapper + + +# --- Schema Imports --- +try: + # Import the relevant schemas defined in schemas.py + from ..schemas import UserRegistrationSchema, UserLoginSchema, UserSchema, UserUpdateSchema + from marshmallow import ValidationError +except ImportError: + print("Warning: Could not import User schemas or ValidationError in auth/auth_routes.py.") + UserRegistrationSchema = None + UserLoginSchema = None + UserSchema = None + UserUpdateSchema = None + ValidationError = None + +# --- Shared Extensions Import --- +# Import mongo for direct use (alternative to current_app.mongo) +try: + from ..extensions import mongo +except ImportError: + print("Warning: Could not import mongo extension in auth/auth_routes.py.") + mongo = None + + +# --- Helper to get logger safely --- +def _get_logger(): + if has_app_context(): + return current_app.logger + return logging.getLogger(__name__) + +# Note: Routes use paths relative to the '/api/auth' prefix defined in __init__.py. 
+ +@bp.route('/register', methods=['POST']) +def register(): + """ + Register a new user. + Uses UserRegistrationSchema for input validation. + Expects 'username', 'email', 'password' in JSON payload. + Checks for existing username/email. Hashes password. Stores user. + Returns a JWT token and serialized user info (using UserSchema) upon success. + """ + logger = _get_logger() + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not UserRegistrationSchema or not UserSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = UserRegistrationSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Registration validation failed: {err.messages}") + return jsonify(err.messages), 422 # Return validation errors + + # Extract validated data + username = validated_data['username'] + email = validated_data['email'] + password = validated_data['password'] # Raw password (load_only) + + try: + db = mongo.db # Use imported mongo instance's db attribute + # Check if username or email already exists + if db.users.find_one({"username": username}): + return jsonify({"message": "Username already exists."}), 409 # 409 Conflict + if db.users.find_one({"email": email}): + return jsonify({"message": "Email already registered."}), 409 # 409 Conflict + except AttributeError: + logger.error("PyMongo extension not initialized or db attribute missing.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Database error checking existing user: {e}", exc_info=True) + return jsonify({"message": "Database error during registration check."}), 500 + + # Hash the password before storing + hashed_pw = generate_password_hash(password) + + # Create the new user document + now = datetime.datetime.now(datetime.timezone.utc) # Use timezone-aware UTC time + new_user_doc = { + "username": username, + "email": email, + "password": hashed_pw, # Store the hashed password + "createdAt": now, + "updatedAt": now + } + + # Insert the new user into the database + try: + result = db.users.insert_one(new_user_doc) + user_id = result.inserted_id # This is an ObjectId + # Fetch the created user document to serialize it + created_user = db.users.find_one({"_id": user_id}) + if not created_user: # Should not happen, but check + logger.error(f"Failed to retrieve user immediately after insertion: {user_id}") + # Don't fail the whole registration, maybe just log and proceed without user data in response + created_user = {"_id": user_id, "username": username, "email": email} # Construct manually if needed + + except Exception as e: + logger.error(f"Error inserting new user: {e}", exc_info=True) + return jsonify({"message": "An error occurred during registration."}), 500 + + # Generate JWT token using settings from app config + try: + secret_key = current_app.config['SECRET_KEY'] + algo = current_app.config.get('JWT_ALGORITHM', 'HS256') + exp_hours = current_app.config.get('JWT_EXP_DELTA_HOURS', 24) + + token_payload = { + "user_id": str(user_id), # Convert ObjectId to string for JWT payload + "exp": datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=exp_hours) + } + token = jwt.encode(token_payload, secret_key, algorithm=algo) + + except KeyError: + logger.error("SECRET_KEY not configured in Flask app 
for JWT.") + return jsonify({"message": "Server configuration error: JWT secret missing."}), 500 + except Exception as e: + logger.error(f"Error encoding JWT during registration: {e}", exc_info=True) + return jsonify({"message": "Could not generate authentication token."}), 500 + + # Serialize the created user data using UserSchema (excludes password) + output_schema = UserSchema() + serialized_user = output_schema.dump(created_user) + + # Return success response with token and serialized user info + return jsonify({ + "message": "User registered successfully.", + "token": token, + "user": serialized_user # Return user object instead of just id + }), 201 # 201 Created + + +@bp.route('/login', methods=['POST']) +def login(): + """ + Log in an existing user. + Uses UserLoginSchema for input validation. + Expects 'username' and 'password' in JSON payload. + Verifies credentials against the database. + Returns a JWT token and serialized user info (using UserSchema) upon success. + """ + logger = _get_logger() + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not UserLoginSchema or not UserSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = UserLoginSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Login validation failed: {err.messages}") + return jsonify(err.messages), 422 + + username = validated_data['username'] + password = validated_data['password'] # Raw password (load_only) + + # Access the database + try: + db = mongo.db + if db is None: raise AttributeError("db attribute is None") + # Find user by username + user_doc = db.users.find_one({"username": username}) + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly during login.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Database error during login for user {username}: {e}", exc_info=True) + return jsonify({"message": "An error occurred during login."}), 500 + + # Check if user exists and if the password hash matches + if not user_doc or 'password' not in user_doc or not check_password_hash(user_doc["password"], password): + return jsonify({"message": "Invalid credentials."}), 401 # Use 401 for authentication failure + + # Generate JWT token using settings from app config + try: + user_id = user_doc["_id"] # Get ObjectId + secret_key = current_app.config['SECRET_KEY'] + algo = current_app.config.get('JWT_ALGORITHM', 'HS256') + exp_hours = current_app.config.get('JWT_EXP_DELTA_HOURS', 24) + + token_payload = { + "user_id": str(user_id), # Convert ObjectId to string + "exp": datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=exp_hours) + } + token = jwt.encode(token_payload, secret_key, algorithm=algo) + + except KeyError: + logger.error("SECRET_KEY not configured in Flask app for JWT.") + return jsonify({"message": "Server configuration error: JWT secret missing."}), 500 + except Exception as e: + logger.error(f"Error encoding JWT for user {username}: {e}", exc_info=True) + return jsonify({"message": "Could not generate authentication token."}), 500 + + # Serialize the user data using UserSchema (excludes password) + output_schema = UserSchema() + serialized_user = output_schema.dump(user_doc) + + # Return success response 
with token and serialized user info + return jsonify({ + "message": "Login successful.", + "token": token, + "user": serialized_user # Return user object instead of just id + }), 200 + + +@bp.route('/delete_account', methods=['DELETE']) +@token_required # Apply the decorator to protect the route and inject 'current_user' +def delete_account(current_user): + """ + Delete the account of the currently authenticated user (identified by token). + Also handles associated data like projects and URLs. + Requires a valid JWT token. + (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id_str = str(current_user.get("user_id") or current_user.get("_id")) + if not user_id_str: + return jsonify({"message": "Invalid token or user information not found in token."}), 401 + user_id = ObjectId(user_id_str) # Convert string ID back to ObjectId + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in delete_account from token data {current_user}: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + db = mongo.db + + # --- Data handling logic (remains the same) --- + # [ Deletion logic for user, projects, urls, activity, dialogs ] + # 1. Delete the user document itself + user_result = db.users.delete_one({"_id": user_id}) + + # 2. Remove user from collaborator lists in projects they didn't own + db.projects.update_many( + {"ownerId": {"$ne": user_id}, "collaborators": user_id}, + {"$pull": {"collaborators": user_id}} + ) + + # 3. Handle projects owned by the user + owned_projects_cursor = db.projects.find({"ownerId": user_id}, {"_id": 1, "collaborators": 1}) + project_ids_to_delete = [] + projects_to_reassign = [] + + for project in owned_projects_cursor: + project_id = project["_id"] + collaborators = [collab_id for collab_id in project.get("collaborators", []) if collab_id != user_id] + if collaborators: + new_owner = collaborators[0] + projects_to_reassign.append({ + "filter": {"_id": project_id}, + "update": { + "$set": {"ownerId": new_owner, "lastActivityBy": new_owner}, + "$pull": {"collaborators": new_owner} + } + }) + else: + project_ids_to_delete.append(project_id) + + if projects_to_reassign: + for reassignment in projects_to_reassign: + db.projects.update_one(reassignment["filter"], reassignment["update"]) + logger.info(f"Reassigned ownership for {len(projects_to_reassign)} projects previously owned by {user_id_str}") + + if project_ids_to_delete: + delete_owned_projects_result = db.projects.delete_many({"_id": {"$in": project_ids_to_delete}}) + logger.info(f"Deleted {delete_owned_projects_result.deleted_count} projects owned by {user_id_str} with no remaining collaborators.") + # Cascade deletes + delete_urls_result = db.urls.delete_many({"projectId": {"$in": project_ids_to_delete}}) + logger.info(f"Deleted {delete_urls_result.deleted_count} URLs for deleted projects of user {user_id_str}") + delete_activity_result = db.project_activity.delete_many({"projectId": {"$in": project_ids_to_delete}}) + logger.info(f"Deleted {delete_activity_result.deleted_count} activity logs for deleted projects of user {user_id_str}") + delete_dialog_result = db.dialog_activity.delete_many({"projectId": {"$in": project_ids_to_delete}}) + logger.info(f"Deleted 
{delete_dialog_result.deleted_count} dialog sessions for deleted projects of user {user_id_str}") + # --- End data handling logic --- + + if user_result.deleted_count == 1: + return jsonify({"message": "Account and associated data handled successfully."}), 200 + elif user_result.deleted_count == 0: + return jsonify({"message": "User not found or already deleted."}), 404 + else: + logger.warning(f"Unexpected deleted_count ({user_result.deleted_count}) for user {user_id}") + return jsonify({"message": "An issue occurred during account deletion."}), 500 + + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error during account deletion for user {user_id_str}: {e}", exc_info=True) + return jsonify({"message": "An internal error occurred during account deletion."}), 500 + + +@bp.route('/logout', methods=['POST']) +@token_required # Ensures only logged-in users can call logout (though it's stateless) +def logout(current_user): + """ + Logs out a user (stateless JWT). Client is responsible for discarding the token. + (No schema needed for input/output here) + """ + return jsonify({"message": "Logout successful. Please discard your token."}), 200 + + +@bp.route('/account', methods=['PUT']) +@token_required # Protect the route and get user info from token +def update_account(current_user): + """ + Update the authenticated user's username, email, and/or password. + Uses UserUpdateSchema for input validation. + Expects JSON payload with optional 'username', 'email', 'password' fields. + (Returns simple message, no schema needed for output) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id_str = str(current_user.get("_id") or current_user.get("user_id")) + if not user_id_str: + return jsonify({"message": "User ID not found in token."}), 401 + user_id = ObjectId(user_id_str) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error from token ({current_user}) in update_account: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not UserUpdateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = UserUpdateSchema() + try: + # Load validates optional fields based on schema rules + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Update account validation failed: {err.messages}") + return jsonify(err.messages), 422 + + # If validation passed but no valid fields were provided + if not validated_data: + return jsonify({"message": "No valid update fields provided (username, email, or password)."}), 400 + + db = mongo.db + update_fields = {} # Dictionary to hold fields to be updated + db_validation_errors = {} # Store potential db-level validation errors (like uniqueness) + + # --- Validate uniqueness and prepare updates based on validated_data --- + try: + # Check username uniqueness if provided and validated + if "username" in validated_data: + new_username = validated_data["username"] + if db.users.find_one({"username": new_username, 
"_id": {"$ne": user_id}}): + db_validation_errors["username"] = "Username is already taken." + else: + update_fields["username"] = new_username + + # Check email uniqueness if provided and validated + if "email" in validated_data: + new_email = validated_data["email"] + if db.users.find_one({"email": new_email, "_id": {"$ne": user_id}}): + db_validation_errors["email"] = "Email is already registered by another user." + else: + update_fields["email"] = new_email + + # Hash password if provided and validated + if "password" in validated_data: + update_fields["password"] = generate_password_hash(validated_data["password"]) + + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly during validation.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error during database validation for user {user_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred during data validation."}), 500 + + # If database validation errors occurred (e.g., uniqueness checks) + if db_validation_errors: + return jsonify({"message": "Validation errors occurred.", "errors": db_validation_errors}), 409 # 409 Conflict + + # If there are fields to update, add the timestamp and perform the update + if update_fields: + update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc) + try: + result = db.users.update_one({"_id": user_id}, {"$set": update_fields}) + if result.matched_count == 0: + # This case means the user_id from the token doesn't exist in the DB anymore + return jsonify({"message": "User not found."}), 404 + # modified_count might be 0 if the provided data was the same as existing data + # We consider it a success even if no fields were technically modified + return jsonify({"message": "Account updated successfully."}), 200 + + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly during update.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error updating account for user {user_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while updating the account."}), 500 + else: + # This case should ideally not be reached due to the checks at the beginning, + # but included for completeness if validation passed with no update fields. 
+ return jsonify({"message": "No changes were requested or fields were invalid."}), 400 + diff --git a/backend_flask/myapp/config.py b/backend_flask/myapp/config.py new file mode 100644 index 0000000..6baa2ee --- /dev/null +++ b/backend_flask/myapp/config.py @@ -0,0 +1,31 @@ +import os +import secrets + + + +class Config: + # MongoDB Atlas connection string: set it in your environment variables + MONGO_URI: str = os.environ.get( + "MONGO_URI", + "mongodb+srv://surfsmart_server:IVV0mzUcwoEqHjNV@projectdatacluster.ki0t3z8.mongodb.net/surfsmart?retryWrites=true&w=majority&appName=ProjectDataCluster" + ) + + # Flask secret key for sessions and JWT (use a secure value in production) + SECRET_KEY: str = os.environ.get("SECRET_KEY", secrets.token_hex(32)) + + # JWT configuration + JWT_ALGORITHM: str = "HS256" + JWT_EXP_DELTA_HOURS: int = 2 + + # TODO make this name selectable + GEMINI_MODEL_NAME = 'gemini-1.5-pro-latest' + + # For celery + CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://localhost:6379/0") + CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://localhost:6379/0") + + + +config = { + "default": Config() + } \ No newline at end of file diff --git a/backend_flask/myapp/dialog/__init__.py b/backend_flask/myapp/dialog/__init__.py new file mode 100644 index 0000000..d79695e --- /dev/null +++ b/backend_flask/myapp/dialog/__init__.py @@ -0,0 +1,13 @@ +# myapp/dialog/__init__.py + +from flask import Blueprint + +# Define the Blueprint instance for the dialog module. +# 'dialog' is the unique name for this blueprint. +# url_prefix='/api/dialog' will be prepended to all routes defined in this blueprint. +bp = Blueprint('dialog', __name__, url_prefix='/api/dialog') + +# Import the routes module. +# This connects the routes defined in routes.py (including helper functions) to the 'bp' instance. +# This import MUST come AFTER the Blueprint 'bp' is defined. +from . import dialog_routes diff --git a/backend_flask/myapp/dialog/dialog_routes.py b/backend_flask/myapp/dialog/dialog_routes.py new file mode 100644 index 0000000..7b40fac --- /dev/null +++ b/backend_flask/myapp/dialog/dialog_routes.py @@ -0,0 +1,787 @@ +# myapp/dialog/dialog_routes.py + +import datetime +import os +import logging +from flask import request, jsonify, current_app, has_app_context # Flask utilities +from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds +from functools import wraps # Import wraps for dummy decorator + + +# --- Local Blueprint Import --- +from . 
import bp # Import the 'bp' instance defined in the local __init__.py + +# --- Shared Extensions and Utilities Imports --- +try: + from ..extensions import mongo # Import the initialized PyMongo instance + from ..utils import token_required # Import the authentication decorator +except ImportError: + # Fallback or error handling if imports fail + print("Warning: Could not import mongo or token_required in dialog/dialog_routes.py.") + mongo = None + # Define a dummy decorator if token_required is missing + def token_required(f): + @wraps(f) + def wrapper(*args, **kwargs): + print("ERROR: token_required decorator is not available!") + return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500 + return wrapper + +# --- Schema Imports --- +try: + # Import the relevant schemas defined in schemas.py + from ..schemas import ( + DialogCreateSchema, DialogSendMessageSchema, + DialogSchema, DialogSummarySchema + ) + from marshmallow import ValidationError +except ImportError: + print("Warning: Could not import Dialog schemas or ValidationError in dialog/dialog_routes.py.") + DialogCreateSchema = None + DialogSendMessageSchema = None + DialogSchema = None + DialogSummarySchema = None + ValidationError = None # Define ValidationError as None if import fails + + +# --- External API and Langchain Imports --- +# Keep these imports conditional to avoid errors if libraries are not installed +try: + import google.generativeai as genai + from google.api_core import exceptions as google_exceptions +except ImportError: + print("Warning: google.generativeai not installed. Gemini functionality will fail.") + genai = None + google_exceptions = None + +try: + from langchain_community.embeddings import OpenAIEmbeddings + from langchain_community.vectorstores import FAISS + from langchain.docstore.document import Document +except ImportError: + print("Warning: Langchain components not installed. Vector store functionality will fail.") + OpenAIEmbeddings = None + FAISS = None + Document = None + +# --- Constants --- +# Adjust model name if needed, potentially move to config +MAX_HISTORY_MESSAGES = 20 # Max messages to keep in history for context (if applicable, not used in current Gemini call) + +# --- Helper to get logger safely --- +def _get_logger(): + if has_app_context(): + return current_app.logger + return logging.getLogger(__name__) + + +################################################## +# Helper Functions (kept within this module) +################################################## + +# --- Knowledge Base Helpers --- +# (generate_knowledge_base_message, update_project_knowledge, process_api_response_and_update_knowledge - unchanged from previous version) +def generate_knowledge_base_message(project_id): + """ + Retrieves all URL documents for the given project and formats them + into a single knowledge base message string for context. + + Args: + project_id (ObjectId): The ObjectId of the project. + + Returns: + str: A formatted string containing knowledge base entries, or an empty string on error. + Returns "No external knowledge available." if no URLs are found. 
+ """ + logger = _get_logger() + if not mongo: + logger.error("Mongo extension not available in generate_knowledge_base_message.") + return "" # Cannot proceed without DB connection + + try: + # Ensure project_id is ObjectId + if not isinstance(project_id, ObjectId): + project_id = ObjectId(project_id) + + # Find all URL documents linked to the project ID + urls_cursor = mongo.db.urls.find({"projectId": project_id}) + knowledge_entries = [] + for doc in urls_cursor: + # Format keywords with percentages + keywords_list = [f"{kw.get('word', '')}({kw.get('percentage', 'N/A')}%)" for kw in doc.get("keywords", [])] + # Aggregate relevant fields into a string entry + aggregated = ( + f"Title: {doc.get('title', 'N/A')}\n" + f"URL: {doc.get('url', 'N/A')}\n" + # f"Starred: {doc.get('starred', False)}\n" # Optionally include starred status + f"Note: {doc.get('note', 'N/A')}\n" + f"Keywords: {', '.join(keywords_list) if keywords_list else 'N/A'}\n" + f"Summary: {doc.get('summary', 'N/A')}" + ) + knowledge_entries.append(aggregated) + + # Handle case where no URLs are found + if not knowledge_entries: + return "No external knowledge available for this project." + + # Combine entries and truncate if necessary + combined = "\n\n---\n\n".join(knowledge_entries) + # Use Flask config for max length if available, otherwise default + max_length = current_app.config.get('KNOWLEDGE_BASE_MAX_LENGTH', 4000) if has_app_context() else 4000 + if len(combined) > max_length: + combined = combined[:max_length] + " ... [truncated]" + return combined + + except InvalidId: + logger.error(f"Invalid project_id format passed to generate_knowledge_base_message: {project_id}") + return "Error: Invalid project identifier." + except AttributeError: + logger.error("PyMongo extension not initialized or available.") + return "Error: Database configuration issue." + except Exception as e: + # Log the error with project ID for easier debugging + logger.error(f"Error generating knowledge base message for project {project_id}: {e}", exc_info=True) + return "" # Return empty string on generic error + + +def update_project_knowledge(project_id): + """ + Updates the project's 'summary' field with a condensed version of its knowledge base. + This acts as a cache or snapshot for quick reference. + + Args: + project_id (ObjectId): The ObjectId of the project. + """ + logger = _get_logger() + if not mongo: + logger.error("Mongo extension not available in update_project_knowledge.") + return + try: + # Ensure project_id is ObjectId + if not isinstance(project_id, ObjectId): + project_id = ObjectId(project_id) + + knowledge_message = generate_knowledge_base_message(project_id) + # Condense the message for storage (e.g., first 1000 chars) + condensed = knowledge_message[:1000] if len(knowledge_message) > 1000 else knowledge_message + + # Update the project document in the 'projects' collection + mongo.db.projects.update_one( + {"_id": project_id}, + {"$set": {"summary": condensed, "updatedAt": datetime.datetime.now(datetime.timezone.utc)}} + ) + except InvalidId: + logger.error(f"Invalid project_id format passed to update_project_knowledge: {project_id}") + except AttributeError: + logger.error("PyMongo extension not initialized or available.") + except Exception as e: + logger.error(f"Error updating project knowledge cache for {project_id}: {e}", exc_info=True) + + +def process_api_response_and_update_knowledge(api_response, project_id): + """ + Placeholder function to process LLM responses. Currently updates project knowledge cache. 
+ """ + # For now, simply update the cached summary in the project document + update_project_knowledge(project_id) + # Future enhancements could go here + + +# --- Vector Store Helpers --- +def build_vector_knowledge_base(project_id, query, k=3): + """ + Builds a vector index (FAISS) from project URL content and retrieves top-k relevant documents. + + Args: + project_id (ObjectId): The ObjectId of the project. + query (str): The user query for similarity search. + k (int): The number of top similar documents to retrieve. + + Returns: + List[Document]: A list of LangChain Document objects, or an empty list on error/no data. + """ + logger = _get_logger() + # Check if necessary components are available + if not mongo or not OpenAIEmbeddings or not FAISS or not Document: + logger.error("Missing dependencies (Mongo, Langchain) for build_vector_knowledge_base.") + return [] + + try: + # Ensure project_id is ObjectId + if not isinstance(project_id, ObjectId): + project_id = ObjectId(project_id) + + # Fetch URL documents from MongoDB + urls_cursor = mongo.db.urls.find({"projectId": project_id}) + texts = [] + metadatas = [] + + for doc in urls_cursor: + # Aggregate text content for embedding + keywords_list = [f"{kw.get('word', '')}({kw.get('percentage', 'N/A')}%)" for kw in doc.get("keywords", [])] + aggregated = ( + f"Title: {doc.get('title', 'N/A')}\n" + f"URL: {doc.get('url', 'N/A')}\n" + # f"Starred: {doc.get('starred', False)}\n" # Optionally include more fields + f"Note: {doc.get('note', 'N/A')}\n" + f"Keywords: {', '.join(keywords_list) if keywords_list else 'N/A'}\n" + f"Summary: {doc.get('summary', 'N/A')}" + ) + texts.append(aggregated) + # Store relevant metadata alongside the text + metadatas.append({"url": doc.get("url", ""), "title": doc.get("title", ""), "doc_id": str(doc["_id"])}) + + # If no text content found, return empty list + if not texts: + logger.info(f"No URL text content found for project {project_id} to build vector base.") + return [] + + # Initialize embeddings model (ensure OPENAI_API_KEY is set in environment or config) + try: + # Check if OPENAI_API_KEY exists (more robust check) + openai_api_key = os.environ.get("OPENAI_API_KEY") or (current_app.config.get("OPENAI_API_KEY") if has_app_context() else None) + if not openai_api_key: + raise ValueError("OPENAI_API_KEY environment variable or Flask config not set.") + embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key) + except Exception as e: + logger.error(f"Failed to initialize OpenAIEmbeddings: {e}. Check API key.", exc_info=False) # Avoid logging key + return [] + + + # Build FAISS vector store from the texts and metadata + vectorstore = FAISS.from_texts(texts=texts, embedding=embeddings, metadatas=metadatas) + + # Perform similarity search + docs = vectorstore.similarity_search(query, k=k) + return docs + + except InvalidId: + logger.error(f"Invalid project_id format passed to build_vector_knowledge_base: {project_id}") + return [] + except AttributeError: + logger.error("PyMongo or Langchain components not initialized or available.") + return [] + except Exception as e: + logger.error(f"Error building vector knowledge base for project {project_id}: {e}", exc_info=True) + return [] # Return empty list on error + + +def build_vector_based_prompt_with_knowledge(user_message, project_id): + """ + Constructs a prompt for the LLM, incorporating context from vector search results. + + Args: + user_message (str): The user's latest message/query. + project_id (ObjectId): The ObjectId of the project. 
+ + Returns: + str: The formatted prompt string including retrieved knowledge. + """ + # Retrieve top 3 relevant documents using vector search + retrieved_docs = build_vector_knowledge_base(project_id, user_message, k=3) + + # Format the retrieved knowledge for inclusion in the prompt + if retrieved_docs: + # Join the page_content of each retrieved LangChain Document + knowledge_text = "\n\n---\n\n".join([f"Source URL: {doc.metadata.get('url', 'N/A')}\n{doc.page_content}" for doc in retrieved_docs]) + else: + knowledge_text = "No relevant external knowledge found via vector search for this query." + + # Construct the final prompt with instructions, knowledge, and user query + # Make prompt more specific about using ONLY the provided knowledge + prompt = ( + "You are an expert research assistant. Analyze the following retrieved documents, which contain information " + "(titles, URLs, notes, keywords, summaries) from websites related to the current research project. " + "Base your response *only* on this provided information and the user's query.\n\n" + "Common user questions might involve:\n" + "- Summarizing key topics from the retrieved documents.\n" + "- Suggesting research directions based *only* on the retrieved documents.\n" + "- Recommending specific URLs *from the retrieved documents* that are most relevant.\n" + "- Identifying potentially redundant information *within the retrieved documents*.\n\n" + "--- Relevant Retrieved Knowledge ---\n" + f"{knowledge_text}\n" + "--- End Retrieved Knowledge ---\n\n" + "User Query:\n" + f"{user_message}\n\n" + "Based strictly on the retrieved knowledge and the user query, provide your analysis and recommendations:" + ) + return prompt + + +# --- Gemini Message Formatting Helper (Not currently used by send_dialog_message) --- +def format_messages_for_gemini(db_messages, max_history=MAX_HISTORY_MESSAGES): + """ + Converts dialog history from DB format to Gemini API format. + Handles role mapping ('system' -> 'model') and ensures role alternation. + """ + logger = _get_logger() + contents = [] + last_role = None + recent_messages = db_messages[-max_history:] # Get the most recent messages + + for msg in recent_messages: + gemini_role = "model" if msg.get("role") == "system" else "user" + if gemini_role == last_role: + logger.warning(f"Skipping consecutive message of role '{gemini_role}' in formatting.") + continue + contents.append({ + "role": gemini_role, + "parts": [{"text": msg.get("content", "")}] + }) + last_role = gemini_role + + if contents and contents[-1]["role"] != "user": + logger.warning("Formatted history for Gemini does not end with a 'user' message.") + return contents + + +################################################## +# Dialog API Endpoints +################################################## + +# Note: Routes use paths relative to the '/api/dialog' prefix. + +@bp.route('/', methods=['POST']) # Path relative to prefix +@token_required +def create_dialog_session(current_user): + """ + Creates a new dialog session associated with a project. + Uses DialogCreateSchema for input validation. + Expects JSON: { "projectId": "", "sessionId": "", "startMessage": "" } + Determines the LLM provider based on the user's selected API key. 
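For reference, the contents structure that format_messages_for_gemini produces (and that generate_content accepts) looks like the following; the three-turn history is hypothetical. Stored 'system' turns are mapped to the 'model' role, and roles must alternate, ending on 'user':

# Hypothetical history in the Gemini 'contents' format built by format_messages_for_gemini.
gemini_contents = [
    {"role": "user", "parts": [{"text": "Summarize the saved URLs."}]},
    {"role": "model", "parts": [{"text": "They cover FAISS, Flask blueprints, and Celery."}]},
    {"role": "user", "parts": [{"text": "Which URL is most relevant to vector search?"}]},
]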
+ """ + logger = _get_logger() + # Validate user + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in create_dialog_session: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not DialogCreateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = DialogCreateSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Create dialog session validation failed: {err.messages}") + return jsonify(err.messages), 422 # Return validation errors + + # Extract validated data + project_id_str = validated_data['projectId'] # Already validated as ObjectId string by schema + session_id = validated_data.get("sessionId", "") # Optional + start_message = validated_data.get("startMessage", "").strip() # Optional + + try: + # Convert project ID + project_obj_id = ObjectId(project_id_str) # Conversion should succeed due to schema validation + + # Find the user's selected API key + db = mongo.db + selected_api = db.api_list.find_one({"uid": user_id, "selected": True}) + if not selected_api: + return jsonify({"message": "User has no selected API provider. Please select one in API Keys."}), 400 + + provider = selected_api.get("name") + api_key_exists = bool(selected_api.get("key")) # Check if key value exists + + # Validate provider and key presence + allowed_providers = ["Gemini", "Deepseek", "Chatgpt"] # Consider from config + if provider not in allowed_providers: + return jsonify({"message": f"Selected provider '{provider}' is not supported."}), 400 + if not api_key_exists: + return jsonify({"message": f"API key value missing for selected provider '{provider}'."}), 400 + + # Verify project exists and user has access + project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1}) + if not project: + return jsonify({"message": "Project not found."}), 404 + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "Access denied to the specified project."}), 403 + + # Prepare initial messages if startMessage exists + now = datetime.datetime.now(datetime.timezone.utc) + messages_array = [] + if start_message: + messages_array.append({ + "role": "user", + "content": start_message, + "timestamp": now # Store timestamp for messages + }) + + # Prepare the new dialog document + dialog_doc = { + "uid": user_id, + "projectId": project_obj_id, + "provider": provider, # Store the provider used for this session + "sessionStartedAt": now, + "sessionEndedAt": None, # Mark as null initially + "messages": messages_array + } + if session_id: dialog_doc["sessionId"] = session_id + + # Insert the new dialog session + result = db.dialog_activity.insert_one(dialog_doc) + + # Return success response with the new dialog ID + return jsonify({ + "message": "Dialog session created successfully.", + "dialog_id": str(result.inserted_id) + }), 201 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found 
in token payload for create_dialog_session.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error creating dialog session for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "Internal server error creating dialog session."}), 500 + + +@bp.route('//send', methods=['POST']) # Path relative to prefix +@token_required +def send_dialog_message(current_user, dialog_id): + """ + Sends a user message within a specific dialog session. + Uses DialogSendMessageSchema for input validation. + Expects JSON: { "content": "User's message text" } + Retrieves context using vector search, builds a prompt, calls the LLM (Gemini), + and stores the conversation turn in the dialog history. + """ + logger = _get_logger() + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not genai or not google_exceptions: return jsonify({"message": "Gemini API library not available."}), 500 + if not DialogSendMessageSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Validate IDs + user_id_str = str(current_user.get("_id")) + if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400 + try: + user_id = ObjectId(user_id_str) + dialog_obj_id = ObjectId(dialog_id) + except InvalidId: + return jsonify({"message": "Invalid user or dialog ID format."}), 400 + + # Get and validate user message content using schema + json_data = request.get_json() or {} + schema = DialogSendMessageSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Send dialog message validation failed: {err.messages}") + return jsonify(err.messages), 422 + + content = validated_data['content'] # Use validated content + + # --- Retrieve Dialog and API Key --- + db = mongo.db + dialog_doc = db.dialog_activity.find_one({"_id": dialog_obj_id, "uid": user_id}) + if not dialog_doc: return jsonify({"message": "Dialog session not found or access denied."}), 404 + if dialog_doc.get("sessionEndedAt"): return jsonify({"message": "This dialog session has ended."}), 409 # 409 Conflict + + provider = dialog_doc.get("provider") + if provider != "Gemini": # This endpoint currently only supports Gemini + return jsonify({"message": f"This endpoint only supports 'Gemini', but session provider is '{provider}'."}), 400 + + # Find the active Gemini API key for the user + api_doc = db.api_list.find_one({"uid": user_id, "name": "Gemini", "selected": True}) + if not (api_doc and api_doc.get("key")): + logger.error(f"No valid Gemini key found or selected for user {user_id} during send message.") + return jsonify({"message": "Gemini API key not configured or selected."}), 400 + gemini_key = api_doc["key"] + + # --- Build Prompt with Vector Knowledge --- + project_id = dialog_doc.get("projectId") + if not project_id or not isinstance(project_id, ObjectId): + logger.error(f"Dialog {dialog_id} is missing valid projectId.") + return jsonify({"message": "Internal error: Project reference missing."}), 500 + + # This builds the prompt incorporating vector search results + detailed_prompt = build_vector_based_prompt_with_knowledge(content, project_id) + + # Prepare history for Gemini 
(currently just the detailed prompt as a single user turn) + gemini_history = [{"role": "user", "parts": [{"text": detailed_prompt}]}] + + # --- Call Gemini API --- + llm_response_text = "[LLM Call Skipped/Failed]" # Default response text + try: + genai.configure(api_key=gemini_key) + model = genai.GenerativeModel(current_app.config["GEMINI_MODEL_NAME"]) + # Consider adding generation_config and safety_settings from Flask config + llm_response = model.generate_content(gemini_history) + + # Extract text, handling potential blocks or empty responses + try: + llm_response_text = llm_response.text + except ValueError: + logger.warning(f"Gemini response for dialog {dialog_id} may have been blocked or empty. Feedback: {llm_response.prompt_feedback}") + llm_response_text = "[Response blocked by safety filters or returned no text content]" + + except google_exceptions.PermissionDenied as ex: + logger.warning(f"Gemini Permission Denied for user {user_id}: {ex}") + return jsonify({"message": "Gemini API Error: Invalid API key or insufficient permissions."}), 403 + except google_exceptions.ResourceExhausted as ex: + logger.warning(f"Gemini Resource Exhausted for user {user_id}: {ex}") + return jsonify({"message": "Gemini API Error: Rate limit or quota exceeded."}), 429 + except google_exceptions.GoogleAPIError as ex: # Catch other Google API errors + logger.error(f"Gemini API communication error for user {user_id}: {ex}", exc_info=True) + return jsonify({"message": "An error occurred while communicating with the Gemini API."}), 503 # 503 Service Unavailable + except Exception as e: # Catch potential genai configuration errors etc. + logger.error(f"Unexpected error during Gemini call setup or execution for user {user_id}: {e}", exc_info=True) + return jsonify({"message": "Internal server error during LLM communication."}), 500 + + + # --- Process Response and Update DB --- + now = datetime.datetime.now(datetime.timezone.utc) + user_msg_entry = {"role": "user", "content": content, "timestamp": now} + system_msg_entry = {"role": "system", "content": llm_response_text, "timestamp": now} # Use same timestamp for pair + + # Add both messages to the dialog history in MongoDB atomically + update_res = db.dialog_activity.update_one( + {"_id": dialog_obj_id}, + {"$push": {"messages": {"$each": [user_msg_entry, system_msg_entry]}}} + ) + if update_res.modified_count != 1: + logger.warning(f"Dialog {dialog_id} DB update failed after LLM call (modified_count={update_res.modified_count}).") + # Decide if this should be an error response to the user + + # Process the response (e.g., update cached knowledge) + process_api_response_and_update_knowledge(llm_response_text, project_id) + + # Return the LLM's response text to the client + return jsonify({"message": "LLM response received.", "llmResponse": llm_response_text}), 200 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for send_dialog_message.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo or other extension not initialized correctly.") + return jsonify({"message": "Server configuration error."}), 500 + except Exception as e: + logger.error(f"Unexpected error in send_dialog_message for dialog {dialog_id}: {e}", exc_info=True) + return jsonify({"message": "Internal server error processing message."}), 500 + + +@bp.route('/', methods=['GET']) # Path relative to prefix +@token_required +def 
list_dialog_sessions(current_user): + """ + Lists dialog sessions for the authenticated user. + Uses DialogSummarySchema for output serialization. + Supports filtering by 'projectId' query parameter. + Excludes the 'messages' array for brevity. + """ + logger = _get_logger() + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not DialogSummarySchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Validate user ID + user_id_str = str(current_user.get("_id")) + if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400 + try: + user_id = ObjectId(user_id_str) + except InvalidId: + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Base query for the user's dialogs + query = {"uid": user_id} + + # Add projectId filter if provided in query parameters + project_id_str = request.args.get("projectId") + if project_id_str: + try: + project_obj_id = ObjectId(project_id_str) + query["projectId"] = project_obj_id + except InvalidId: + return jsonify({"message": "Invalid projectId format in query parameter."}), 400 + + # Fetch dialogs, excluding the messages field, sort by start time descending + db = mongo.db + cursor = db.dialog_activity.find( + query, + {"messages": 0} # Projection to exclude messages + ).sort("sessionStartedAt", -1) + + dialog_docs = list(cursor) # Convert cursor to list + + # --- Serialize results using the schema --- + output_schema = DialogSummarySchema(many=True) + # Schema handles ObjectId and datetime conversion, and field exclusion + serialized_result = output_schema.dump(dialog_docs) + + return jsonify({"dialogs": serialized_result}), 200 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for list_dialog_sessions.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error listing dialogs for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "Internal server error listing dialog sessions."}), 500 + + +@bp.route('/', methods=['GET']) # Path relative to prefix +@token_required +def get_dialog_session(current_user, dialog_id): + """ + Retrieves the full details of a specific dialog session, including messages. + Uses DialogSchema for output serialization. Verifies ownership. 
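A hedged client-side sketch of the listing endpoint above. The Bearer Authorization header is an assumption about how token_required (myapp/utils.py) reads the token, and httpx is used only as an example HTTP client; the base URL, token, and project id are placeholders:

import httpx

BASE_URL = "http://localhost:5000"           # assumed dev server address
TOKEN = "paste.a.valid.jwt.here"             # issued by the auth blueprint on login
PROJECT_ID = "64a1f2c3d4e5f6a7b8c9d0e1"      # hypothetical project ObjectId string

resp = httpx.get(
    f"{BASE_URL}/api/dialog/",
    params={"projectId": PROJECT_ID},
    headers={"Authorization": f"Bearer {TOKEN}"},   # assumption: adjust to match token_required
)
resp.raise_for_status()
print(resp.json()["dialogs"])   # summaries only; 'messages' is excluded by the projection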
+ """ + logger = _get_logger() + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not DialogSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Validate IDs + user_id_str = str(current_user.get("_id")) + if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400 + try: + user_id = ObjectId(user_id_str) + dial_obj_id = ObjectId(dialog_id) + except InvalidId: + return jsonify({"message": "Invalid user or dialog ID format."}), 400 + + # Find the specific dialog owned by the user + db = mongo.db + doc = db.dialog_activity.find_one({"_id": dial_obj_id, "uid": user_id}) + if not doc: + return jsonify({"message": "Dialog session not found or access denied."}), 404 + + # --- Serialize results using the schema --- + output_schema = DialogSchema() + # Schema handles ObjectId, datetime, and nested message formatting + serialized_result = output_schema.dump(doc) + + return jsonify(serialized_result), 200 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for get_dialog_session.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error retrieving dialog {dialog_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "Internal server error retrieving dialog session."}), 500 + + +@bp.route('//end', methods=['PUT']) # Path relative to prefix +@token_required +def end_dialog_session(current_user, dialog_id): + """ + Marks a dialog session as ended by setting the 'sessionEndedAt' timestamp. + Prevents ending an already ended session. Verifies ownership. 
+ (No schema needed for input/output here) + """ + logger = _get_logger() + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate IDs + user_id_str = str(current_user.get("_id")) + if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400 + try: + user_id = ObjectId(user_id_str) + dial_obj_id = ObjectId(dialog_id) + except InvalidId: + return jsonify({"message": "Invalid user or dialog ID format."}), 400 + + # Check if session exists, belongs to user, and is not already ended + db = mongo.db + existing_doc = db.dialog_activity.find_one({"_id": dial_obj_id, "uid": user_id}, {"sessionEndedAt": 1}) + if not existing_doc: + return jsonify({"message": "Dialog session not found or access denied."}), 404 + if existing_doc.get("sessionEndedAt") is not None: + # 409 Conflict - the session is already in the 'ended' state + return jsonify({"message": "Dialog session has already been ended."}), 409 + + # Update the document to set the end time + now = datetime.datetime.now(datetime.timezone.utc) + result = db.dialog_activity.update_one( + {"_id": dial_obj_id, "uid": user_id, "sessionEndedAt": None}, # Ensure it's not already ended atomically + {"$set": {"sessionEndedAt": now}} + ) + + # Check if the update was successful + if result.modified_count == 1: + return jsonify({"message": "Dialog session marked as ended."}), 200 + elif result.matched_count == 1 and result.modified_count == 0: + # This could happen if the session was ended between find_one and update_one (race condition) + logger.warning(f"Dialog {dialog_id} was already ended before update (race condition?).") + return jsonify({"message": "Dialog session was already ended."}), 409 + else: # matched_count == 0 (shouldn't happen if find_one worked unless deleted concurrently) + logger.warning(f"Dialog {dialog_id} matched 0 for ending update.") + return jsonify({"message": "Dialog session not found or already ended."}), 404 + + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for end_dialog_session.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error ending dialog {dialog_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "Internal server error ending dialog session."}), 500 + + +@bp.route('/', methods=['DELETE']) # Path relative to prefix +@token_required +def delete_dialog_session(current_user, dialog_id): + """ + Deletes an entire dialog session document. Verifies ownership. 
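The end-session update above is a compare-and-set: the filter includes "sessionEndedAt": None, so only a still-open session is closed, which is what makes the modified_count check meaningful under concurrency. A standalone sketch with a local connection and hypothetical ids:

import datetime

from bson.objectid import ObjectId
from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["surfsmart"]   # hypothetical local connection

dialog_id = ObjectId("64a1f2c3d4e5f6a7b8c9d0e1")   # hypothetical
user_id = ObjectId("64a1f2c3d4e5f6a7b8c9d0e2")     # hypothetical

result = db.dialog_activity.update_one(
    {"_id": dialog_id, "uid": user_id, "sessionEndedAt": None},
    {"$set": {"sessionEndedAt": datetime.datetime.now(datetime.timezone.utc)}},
)
print("ended" if result.modified_count == 1 else "already ended, missing, or not owned")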
+ (No schema needed for input/output here) + """ + logger = _get_logger() + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate IDs + user_id_str = str(current_user.get("_id")) + if not user_id_str: return jsonify({"message": "Missing user ID in token."}), 400 + try: + user_id = ObjectId(user_id_str) + dial_obj_id = ObjectId(dialog_id) + except InvalidId: + return jsonify({"message": "Invalid user or dialog ID format."}), 400 + + # Perform deletion, ensuring the user owns the dialog + db = mongo.db + result = db.dialog_activity.delete_one({"_id": dial_obj_id, "uid": user_id}) + + # Check if a document was deleted + if result.deleted_count == 1: + return jsonify({"message": "Dialog session deleted successfully."}), 200 # 200 OK or 204 No Content + else: + # If deleted_count is 0, the document either didn't exist or didn't belong to the user + return jsonify({"message": "Dialog session not found or access denied."}), 404 + + except KeyError: # Should be caught by initial user_id check + logger.error(f"User ID (_id) not found in token payload for delete_dialog_session.") + return jsonify({"message": "Authentication token is invalid or missing user ID."}), 401 + except AttributeError: + logger.error("PyMongo extension not initialized or attached correctly.") + return jsonify({"message": "Database configuration error."}), 500 + except Exception as e: + logger.error(f"Error deleting dialog {dialog_id} for user {current_user.get('_id', 'UNKNOWN')}: {e}", exc_info=True) + return jsonify({"message": "Internal server error deleting dialog session."}), 500 + diff --git a/backend_flask/myapp/extensions.py b/backend_flask/myapp/extensions.py new file mode 100644 index 0000000..9ca49dc --- /dev/null +++ b/backend_flask/myapp/extensions.py @@ -0,0 +1,24 @@ +# backend/myapp/extensions.py + +""" +Central place to instantiate Flask extension objects. +These objects are initialized with the app instance later in the application factory. +""" + +from flask_pymongo import PyMongo +from flask_cors import CORS + +# from flask_jwt_extended import JWTManager +from flask_marshmallow import Marshmallow + +# Add other necessary extension imports (e.g., Migrate if using SQL + Alembic) + +# Instantiate extensions without the app object +mongo = PyMongo() +cors = CORS() +# jwt = JWTManager() +ma = Marshmallow() +# migrate = Migrate() # Example if using Flask-Migrate + +# You can add other globally used utilities here if needed, +# but primarily focus on Flask extensions. diff --git a/backend_flask/myapp/models.py b/backend_flask/myapp/models.py new file mode 100644 index 0000000..8eca624 --- /dev/null +++ b/backend_flask/myapp/models.py @@ -0,0 +1,4 @@ +from flask_pymongo import PyMongo + +# Create a global PyMongo instance. +mongo = PyMongo() diff --git a/backend_flask/myapp/projects/__init__.py b/backend_flask/myapp/projects/__init__.py new file mode 100644 index 0000000..dc458e8 --- /dev/null +++ b/backend_flask/myapp/projects/__init__.py @@ -0,0 +1,13 @@ +# myapp/projects/__init__.py + +from flask import Blueprint + +# Define the Blueprint instance for the projects module. +# 'projects' is the unique name for this blueprint. +# url_prefix='/api/projects' will be prepended to all routes defined in this blueprint. +bp = Blueprint('projects', __name__, url_prefix='/api/projects') + +# Import the routes module. +# This connects the routes defined in routes.py to the 'bp' instance. +# This import MUST come AFTER the Blueprint 'bp' is defined. +from . 
import projects_routes diff --git a/backend_flask/myapp/projects/projects_routes.py b/backend_flask/myapp/projects/projects_routes.py new file mode 100644 index 0000000..40316f3 --- /dev/null +++ b/backend_flask/myapp/projects/projects_routes.py @@ -0,0 +1,715 @@ +# myapp/projects/projects_routes.py + +import datetime +import os # Needed for checking environment variables (e.g., for OpenAI key) +import logging +from flask import request, jsonify, current_app, has_app_context # Flask utilities +from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds +from collections import defaultdict # May be used in helper logic +from functools import wraps # Import wraps for dummy decorator + + +# --- Local Blueprint Import --- +from . import bp # Import the 'bp' instance defined in the local __init__.py + +# --- Shared Extensions and Utilities Imports --- +try: + from ..extensions import mongo # Import the initialized PyMongo instance + # Import utilities from the parent 'myapp/utils.py' + from ..utils import token_required, generate_passkey +except ImportError: + # Fallback or error handling if imports fail + print("Warning: Could not import mongo, token_required, or generate_passkey in projects/projects_routes.py.") + mongo = None + generate_passkey = lambda: "error_generating_passkey" # Dummy function + # Define a dummy decorator if token_required is missing + def token_required(f): + @wraps(f) + def wrapper(*args, **kwargs): + print("ERROR: token_required decorator is not available!") + return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500 + return wrapper + +# --- Schema Imports --- +try: + # Import the relevant schemas defined in schemas.py + from ..schemas import ( + ProjectCreateSchema, ProjectUpdateSchema, ProjectSchema, + ProjectListSchema # Use ProjectListSchema for the list endpoint + ) + from marshmallow import ValidationError +except ImportError: + print("Warning: Could not import Project schemas or ValidationError in projects/projects_routes.py.") + ProjectCreateSchema = None + ProjectUpdateSchema = None + ProjectSchema = None + ProjectListSchema = None + ValidationError = None + +# --- Celery Task Import --- +# IMPORTANT: Assumes the project root directory ('your_fullstack_project/') is in PYTHONPATH +try: + from backend_flask.celery_worker.celery_app import async_recalc_project_keywords +except ModuleNotFoundError: + print("Warning: Could not import 'async_recalc_project_keywords' from 'celery_worker'. 
Ensure project root is in PYTHONPATH.") + # Define a dummy task function to prevent NameError if Celery isn't set up + def _dummy_celery_task(*args, **kwargs): + task_name = args[0] if args else 'dummy_task' + print(f"ERROR: Celery task {task_name} not available!") + class DummyTask: + def __init__(self, name): + self.__name__ = name + def delay(self, *a, **kw): + print(f"ERROR: Tried to call delay() on dummy task {self.__name__}") + pass + return DummyTask(task_name) + async_recalc_project_keywords = _dummy_celery_task('async_recalc_project_keywords') + + +# --- Dialog Helper Import --- +# Import the helper function from the sibling 'dialog' blueprint's routes module +try: + # Assumes the function is defined in myapp/dialog/dialog_routes.py + from ..dialog.dialog_routes import generate_knowledge_base_message +except ImportError: + print("Warning: Could not import 'generate_knowledge_base_message' from dialog blueprint.") + # Define a dummy function + generate_knowledge_base_message = lambda pid: "Error: Knowledge base function not available." + +# --- External Lib Imports (for summarize_project) --- +# Import conditionally to avoid errors if not installed +try: + import google.generativeai as genai + from google.api_core import exceptions as google_exceptions +except ImportError: + print("Warning: google.generativeai not installed. Project summarization will fail.") + genai = None + google_exceptions = None + +# --- Helper to get logger safely --- +def _get_logger(): + if has_app_context(): + return current_app.logger + return logging.getLogger(__name__) + +# Note: Routes use paths relative to the '/api/projects' prefix defined in __init__.py. + +@bp.route('/', methods=['POST']) # Path relative to prefix +@token_required +def create_project(current_user): + """ + Create a new project for the authenticated user. + Uses ProjectCreateSchema for input validation. + Expects 'name' and optional 'topic', 'description' in JSON payload. + Generates a unique passkey for the project. 
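The Celery task imported above is defined in backend_flask/celery_worker/celery_app.py, which is not reproduced here. Purely as a hedged sketch of the shape such a task could take (matching the .delay(project_id, user_id_str) call used later in this blueprint), and not the repository's actual implementation:

from celery import Celery

celery_app = Celery(
    "celery_worker",
    broker="redis://localhost:6379/0",    # mirrors the Config.CELERY_BROKER_URL default
    backend="redis://localhost:6379/0",   # mirrors the Config.CELERY_RESULT_BACKEND default
)

@celery_app.task(name="async_recalc_project_keywords")
def async_recalc_project_keywords(project_id: str, user_id: str) -> str:
    # Placeholder body: the real task recalculates and stores the project's keywords.
    print(f"recalculating keywords for project {project_id} (requested by {user_id})")
    return project_id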
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + logger.error("Invalid current_user object received in create_project") + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in create_project: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ProjectCreateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = ProjectCreateSchema() + try: + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Create project validation failed: {err.messages}") + return jsonify(err.messages), 422 # Return validation errors + + # Extract validated data + name = validated_data['name'] # Required field + topic = validated_data.get('topic', "") # Optional field from schema + description = validated_data.get('description', "") # Optional field from schema + + try: + # Generate a passkey for potential sharing/joining later + passkey = generate_passkey() + db = mongo.db # Use imported mongo instance + + # Prepare project document data + now = datetime.datetime.now(datetime.timezone.utc) + project_data = { + "ownerId": user_id, + "collaborators": [], # Initially empty collaborator list + "passkey": passkey, # Store the generated passkey + "name": name.strip(), # Use validated and trimmed name + "topic": topic, + "description": description, + "summary": "", # Initial empty summary + "keywords": [], # Initial empty keywords + "lastActivityBy": user_id, # Owner is the last active initially + "createdAt": now, + "updatedAt": now + } + + # Insert the new project document + result = db.projects.insert_one(project_data) + project_id = str(result.inserted_id) + + # Return success response with project ID and passkey + return jsonify({ + "message": "Project created successfully.", + "project_id": project_id, + "passkey": passkey # Return passkey so owner knows it + }), 201 # 201 Created status code + + except Exception as e: + # Log the detailed error for debugging + logger.error(f"Error creating project for user {user_id}: {e}", exc_info=True) + # Return a generic error message to the client + return jsonify({"message": "An error occurred while creating the project."}), 500 + + +@bp.route('/', methods=['GET']) # Path relative to prefix +@token_required +def get_projects(current_user): + """ + Retrieve a summary list (ID, name, updatedAt) of projects where the + authenticated user is either the owner or a collaborator. + Uses ProjectListSchema for output serialization. + Sorted by last update time descending. 
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in get_projects: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ProjectListSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + db = mongo.db + # Query for projects owned by or collaborated on by the user + projects_cursor = db.projects.find( + { + "$or": [ + {"ownerId": user_id}, + {"collaborators": user_id} # Check if user ID is in the collaborators array + ] + }, + # Projection: only retrieve fields needed by the ProjectListSchema + {"name": 1, "updatedAt": 1, "_id": 1} + ).sort("updatedAt", -1) # Sort by most recently updated + + project_docs = list(projects_cursor) # Convert cursor to list + + # --- Serialize results using the schema --- + output_schema = ProjectListSchema(many=True) + # Schema handles ObjectId and datetime conversion + serialized_result = output_schema.dump(project_docs) + + # Return the serialized list of project summaries + return jsonify({"projects": serialized_result}), 200 + + except Exception as e: + logger.error(f"Error fetching projects for user {user_id}: {e}", exc_info=True) + # Use a generic error message for the client + return jsonify({"message": "An error occurred while fetching projects."}), 500 + + +@bp.route('/', methods=['GET']) # Path relative to prefix +@token_required +def get_project_detail(current_user, project_id): + """ + Retrieve detailed information for a specific project by its ID. + Uses ProjectSchema for output serialization. + Verifies user access (owner or collaborator). 
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in get_project_detail: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ProjectSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + db = mongo.db + # Validate the provided project ID format + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format."}), 400 + + # Find the project by ID + project_doc = db.projects.find_one({"_id": obj_project_id}) + if not project_doc: + return jsonify({"message": "Project not found."}), 404 # 404 Not Found + + # Verify ownership or collaboration status for access control + owner_id = project_doc.get("ownerId") + collaborators = project_doc.get("collaborators", []) + if not owner_id: # Check for data integrity + logger.error(f"Project {project_id} is missing ownerId.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "Access denied to this project."}), 403 # 403 Forbidden + + # --- Serialize results using the schema --- + output_schema = ProjectSchema() + # Schema handles ObjectId, datetime, nested keywords, and field selection + serialized_result = output_schema.dump(project_doc) + + return jsonify(serialized_result), 200 + + except Exception as e: + logger.error(f"Error fetching project detail for {project_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while fetching project details."}), 500 + + +@bp.route('/', methods=['PUT']) # Path relative to prefix +@token_required +def update_project(current_user, project_id): + """ + Update details of an existing project. + Uses ProjectUpdateSchema for input validation. + Only allows updating specific fields: name, collaborators, topic, description, keywords. + Requires the authenticated user to be the project owner. + Returns the updated project details using ProjectSchema. 
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in update_project: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ProjectUpdateSchema or not ProjectSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + schema = ProjectUpdateSchema() + try: + # Load validates allowed fields and their types (like collaborators list of strings) + validated_data = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Update project validation failed: {err.messages}") + return jsonify(err.messages), 422 + + # If validation passed but no valid fields were provided + if not validated_data: + return jsonify({"message": "No valid fields provided for update."}), 400 + + try: + db = mongo.db + # Validate project ID format + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format."}), 400 + + # Find the project + project = db.projects.find_one({"_id": obj_project_id}, {"ownerId": 1}) # Fetch ownerId for check + if not project: + return jsonify({"message": "Project not found."}), 404 + + # Verify ownership for update permission + owner_id = project.get("ownerId") + if not owner_id: + logger.error(f"Project {project_id} is missing ownerId during update.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id: + return jsonify({"message": "Only the project owner can update this project."}), 403 + + # --- Prepare Update Fields based on validated data --- + update_fields = {} + # Convert collaborator strings back to ObjectIds if present + if "collaborators" in validated_data: + try: + update_fields["collaborators"] = [ObjectId(cid) for cid in validated_data["collaborators"]] + # Optional: Add check here to ensure collaborator IDs exist and are not the owner + except (InvalidId, TypeError): + # This should ideally be caught by schema validation if using _validate_object_id + return jsonify({"message": "Invalid collaborator ID format received."}), 400 + # Copy other validated fields directly + for field in ["name", "topic", "description", "keywords"]: + if field in validated_data: + update_fields[field] = validated_data[field] + + + # Always update the 'updatedAt' timestamp + update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc) + # Note: lastActivityBy is NOT updated here. 
+ + # Perform the update operation + result = db.projects.update_one({"_id": obj_project_id}, {"$set": update_fields}) + + # Check if the update was successful + if result.matched_count == 1: + # Retrieve the updated project document to return it + updated_project_doc = db.projects.find_one({"_id": obj_project_id}) + if updated_project_doc: + # Serialize the updated document using the detail schema + output_schema = ProjectSchema() + serialized_project = output_schema.dump(updated_project_doc) + return jsonify({"message": "Project updated successfully.", "project": serialized_project}), 200 + else: + logger.warning(f"Project {project_id} updated but could not be retrieved.") + return jsonify({"message": "Project updated successfully, but failed to retrieve updated data."}), 200 + else: + # Matched count was 0 + return jsonify({"message": "Project update failed (document not found)."}), 404 + + except Exception as e: + logger.error(f"Error updating project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while updating the project."}), 500 + + +@bp.route('/', methods=['DELETE']) # Path relative to prefix +@token_required +def delete_project(current_user, project_id): + """ + Delete a project and cascade deletion of associated URLs, activity logs, and dialogs. + Requires the authenticated user to be the project owner. + (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in delete_project: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + db = mongo.db + # Validate project ID format + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format."}), 400 + + # Find the project + project = db.projects.find_one({"_id": obj_project_id}, {"ownerId": 1}) + if not project: + return jsonify({"message": "Project not found."}), 404 + + # Verify ownership for delete permission + owner_id = project.get("ownerId") + if not owner_id: + logger.error(f"Project {project_id} is missing ownerId during delete.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id: + return jsonify({"message": "Only the project owner can delete this project."}), 403 + + # --- Perform Deletions (Consider Transactions if available/needed) --- + + # 1. Delete the project document itself + delete_project_result = db.projects.delete_one({"_id": obj_project_id}) + if delete_project_result.deleted_count == 0: + logger.warning(f"Project {project_id} found but delete_one removed 0 documents.") + return jsonify({"message": "Project deletion failed (already deleted?)."}), 404 + + # 2. Cascade delete associated URLs + delete_urls_result = db.urls.delete_many({"projectId": obj_project_id}) + logger.info(f"Deleted {delete_urls_result.deleted_count} URLs for project {project_id}") + + # 3. Cascade delete associated activity logs + delete_activity_result = db.project_activity.delete_many({"projectId": obj_project_id}) + logger.info(f"Deleted {delete_activity_result.deleted_count} activity logs for project {project_id}") + + # 4. 
Cascade delete associated dialog sessions + delete_dialog_result = db.dialog_activity.delete_many({"projectId": obj_project_id}) + logger.info(f"Deleted {delete_dialog_result.deleted_count} dialog sessions for project {project_id}") + + # --- End Deletions --- + + return jsonify({"message": "Project and associated data deleted successfully."}), 200 # 200 OK or 204 No Content + + except Exception as e: + logger.error(f"Error deleting project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while deleting the project."}), 500 + + +@bp.route('//info', methods=['GET']) # Path relative to prefix +@token_required +def get_project_info(current_user, project_id): + """ + Retrieve basic informational fields for a project. + Uses ProjectSchema for output serialization (implicitly selects fields). + Verifies user access (owner or collaborator). + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in get_project_info: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not ProjectSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + + try: + db = mongo.db + # Validate project ID format + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format."}), 400 + + # Find the project, projecting only necessary fields + access control fields + # Schema will handle final field selection for output + project_doc = db.projects.find_one( + {"_id": obj_project_id} # Fetch full doc for schema dump + # {"name": 1, "topic": 1, "description": 1, "keywords": 1, "summary": 1, "ownerId": 1, "collaborators": 1} + ) + if not project_doc: + return jsonify({"message": "Project not found."}), 404 + + # Verify access + owner_id = project_doc.get("ownerId") + collaborators = project_doc.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_id} is missing ownerId in get_project_info.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "Access denied to this project's info."}), 403 + + # --- Serialize using ProjectSchema --- + # The schema definition controls which fields are included in the output + output_schema = ProjectSchema() + serialized_result = output_schema.dump(project_doc) + + # The ProjectSchema includes more than just the 'info' fields, + # adjust schema or create ProjectInfoSchema if only specific fields are desired. + # For now, returning the standard ProjectSchema output. + return jsonify(serialized_result), 200 + + except Exception as e: + logger.error(f"Error getting project info for {project_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while retrieving project info."}), 500 + + +@bp.route('//recalc_keywords', methods=['PUT']) # Path relative to prefix +@token_required +def recalc_project_keywords(current_user, project_id): + """ + Triggers an asynchronous Celery task to recalculate project keywords. + Verifies user access (owner or collaborator). 
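The cascade delete in delete_project above notes that transactions could be considered; here is a hedged sketch of that variant using a PyMongo client session. Multi-document transactions require a replica set (MongoDB Atlas provides one); the connection string and id are hypothetical, and this is not what the patch currently does:

from bson.objectid import ObjectId
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017/?replicaSet=rs0")   # hypothetical
db = client["surfsmart"]
project_id = ObjectId("64a1f2c3d4e5f6a7b8c9d0e1")                   # hypothetical

with client.start_session() as session:
    with session.start_transaction():
        db.projects.delete_one({"_id": project_id}, session=session)
        db.urls.delete_many({"projectId": project_id}, session=session)
        db.project_activity.delete_many({"projectId": project_id}, session=session)
        db.dialog_activity.delete_many({"projectId": project_id}, session=session)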
+ (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + user_id_str = str(user_id) # Keep string version for Celery task if needed + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in recalc_project_keywords: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + db = mongo.db + # Validate project ID format + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format."}), 400 + + # Verify project exists and user has access before queueing task + project = db.projects.find_one( + {"_id": obj_project_id}, + {"ownerId": 1, "collaborators": 1} # Only fetch fields needed for access check + ) + if not project: + return jsonify({"message": "Project not found."}), 404 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_id} is missing ownerId in recalc_keywords.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "Access denied to trigger keyword recalculation for this project."}), 403 + + + # --- Queue the Celery Task --- + try: + # Call the .delay() method on the imported Celery task + task_result = async_recalc_project_keywords.delay(project_id, user_id_str) + logger.info(f"Queued keyword recalc task {task_result.id} for project {project_id}") + # Return 202 Accepted status code to indicate task was queued + return jsonify({"message": "Project keywords recalculation task queued successfully."}), 202 + except NameError: + logger.error("Celery task 'async_recalc_project_keywords' is not defined or imported correctly.") + return jsonify({"message": "Server configuration error: Keyword recalculation feature unavailable."}), 500 + except Exception as e: + # Catch errors related to Celery connection or queueing + logger.error(f"Error queueing recalc keywords task for project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while queueing the keywords recalculation task."}), 500 + + except Exception as e: + # Catch general errors before task queueing + logger.error(f"Error in recalc_project_keywords endpoint for project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An internal error occurred before queueing the task."}), 500 + + +@bp.route('//summarize', methods=['PUT']) # Path relative to prefix +@token_required +def summarize_project(current_user, project_id): + """ + Generates a summary for the project using its associated URL knowledge base + and an external LLM (Gemini). Updates the project's summary field. + Requires the user to have a selected Gemini API key configured. + Verifies user access (owner or collaborator). 
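Since the recalculation endpoint returns 202 and only logs the queued task id, a separate process can poll the task through Celery's result backend. A hedged sketch; the celery_app attribute name inside celery_worker/celery_app.py is an assumption:

from celery.result import AsyncResult

from backend_flask.celery_worker.celery_app import celery_app   # attribute name assumed

task_id = "paste-the-task-id-from-the-route-log-here"
result = AsyncResult(task_id, app=celery_app)
print(result.state)       # PENDING / STARTED / SUCCESS / FAILURE
if result.ready():
    print(result.result)

Returning task_result.id in the 202 response body would make this kind of client-side polling easier.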
+ (No schema needed for input, output is summary string) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in summarize_project: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not genai or not google_exceptions: return jsonify({"message": "Gemini API library not available."}), 500 + + try: + db = mongo.db + # Validate project ID format + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format."}), 400 + + # Verify project exists and user has access + project = db.projects.find_one( + {"_id": obj_project_id}, + {"ownerId": 1, "collaborators": 1} # Only fetch fields needed for access check + ) + if not project: + return jsonify({"message": "Project not found."}), 404 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_id} is missing ownerId in summarize_project.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "Access denied to summarize this project."}), 403 + + # --- Check for User's Gemini API Key --- + api_doc = db.api_list.find_one({"uid": user_id, "selected": True, "name": "Gemini"}) + if not (api_doc and api_doc.get("key")): + return jsonify({"message": "Summarization requires a selected Gemini API key. Please configure it in API Keys."}), 400 # 400 Bad Request - missing prereq + gemini_key = api_doc.get("key") + + # --- Generate Knowledge Base and Prompt --- + # Use the imported helper function from the dialog blueprint + kb_message = generate_knowledge_base_message(obj_project_id) # Pass ObjectId + if not kb_message or kb_message.startswith("Error:") : # Handle error from helper + logger.warning(f"Knowledge base generation failed or was empty for project {project_id}. KB: {kb_message}") + kb_message = "No external knowledge base content available for this project." # Fallback + + # Construct the prompt for Gemini + prompt = ( + f"You are an expert research assistant tasked with summarizing a project. " + f"Below is the external knowledge base compiled from websites associated with this project.\n\n" + f"--- External Knowledge Base ---\n{kb_message}\n--- End Knowledge Base ---\n\n" + f"Based ONLY on the provided knowledge base (do not use external information), please generate a concise and comprehensive summary " + f"of the project's main focus, key topics, and potential research directions. Aim for approximately 300 words, maximum 400 words." 
+ ) + + # --- Call Gemini API --- + summary_text = "[Summary generation failed]" # Default + try: + genai.configure(api_key=gemini_key) + # Use the constant defined earlier or get from config + model = genai.GenerativeModel(current_app.config["GEMINI_MODEL_NAME"]) + gemini_input = [{"role": "user", "parts": [{"text": prompt}]}] + # Consider adding safety settings if needed + llm_response = model.generate_content(gemini_input) + # Extract text, handling potential blocks + try: + summary_text = llm_response.text + except ValueError: + logger.warning(f"Gemini response for project {project_id} summary may have been blocked. Feedback: {llm_response.prompt_feedback}") + summary_text = "[Summary generation blocked or failed]" + + except google_exceptions.PermissionDenied as ex: + logger.warning(f"Gemini Permission Denied for user {user_id} during summarization: {ex}") + return jsonify({"message": "Gemini API Error: Invalid API key or insufficient permissions."}), 403 + except google_exceptions.ResourceExhausted as ex: + logger.warning(f"Gemini Resource Exhausted for user {user_id} during summarization: {ex}") + return jsonify({"message": "Gemini API Error: Rate limit or quota exceeded."}), 429 + except google_exceptions.GoogleAPIError as ex: + logger.error(f"Gemini API communication error during summarization for project {project_id}: {ex}", exc_info=True) + return jsonify({"message": "An error occurred while communicating with the Gemini API."}), 503 + except Exception as e: + logger.error(f"Unexpected error during Gemini call setup/execution for project {project_id} summary: {e}", exc_info=True) + return jsonify({"message": "Internal server error during LLM communication."}), 500 + + # Check if the summary is empty after potential blocking + if not summary_text or summary_text == "[Summary generation blocked or failed]": + return jsonify({"message": "Failed to generate summary (LLM returned empty or blocked response)."}), 500 + + # --- Update Project Summary in DB --- + try: + update_result = db.projects.update_one( + {"_id": obj_project_id}, + {"$set": {"summary": summary_text, "updatedAt": datetime.datetime.now(datetime.timezone.utc)}} + ) + if update_result.matched_count == 0: + # Project deleted between find and update? 
+ logger.warning(f"Project {project_id} not found during summary update.") + return jsonify({"message": "Project not found while saving summary."}), 404 + + # Return success response with the generated summary + return jsonify({"message": "Project summary generated and saved successfully.", "summary": summary_text}), 200 + + except Exception as e: + logger.error(f"Error updating project summary in DB for {project_id}: {e}", exc_info=True) + # Inform user summary was generated but not saved + return jsonify({"message": "Summary generated but failed to save to project.", "summary": summary_text}), 500 + + except Exception as e: + # Catch-all for errors before API call or DB update + logger.error(f"Error in summarize_project endpoint for project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An internal error occurred during project summarization."}), 500 + diff --git a/backend_flask/myapp/schemas.py b/backend_flask/myapp/schemas.py new file mode 100644 index 0000000..4ac96c1 --- /dev/null +++ b/backend_flask/myapp/schemas.py @@ -0,0 +1,227 @@ +# myapp/schemas.py + +from marshmallow import fields, validate, ValidationError, Schema, validates_schema +from marshmallow.validate import OneOf +from bson.objectid import ObjectId, InvalidId + +# Import Marshmallow instance from extensions +# Assumes 'ma = Marshmallow()' is defined in myapp/extensions.py +# and initialized in myapp/__init__.py's create_app() +try: + from .extensions import ma +except ImportError: + # Basic fallback if extensions.py or 'ma' instance is missing + print("WARNING: Flask-Marshmallow instance 'ma' not found in extensions. Falling back.") + from flask_marshmallow import Marshmallow + ma = Marshmallow() + +# --- Custom Validators (Optional but useful) --- + +def _validate_object_id(value): + """Validator function to ensure a string is a valid ObjectId.""" + try: + ObjectId(value) + except (InvalidId, TypeError, ValueError): # Catch potential errors + raise ValidationError("Invalid ObjectId format.") + +def _is_alphabetic_or_empty(value): + """Validator for keywords: allows empty string or purely alphabetic.""" + if value is not None and value != "" and not value.isalpha(): + raise ValidationError("Keyword must be alphabetic if not empty.") + return True # Pass validation if empty or alphabetic + + +# --- Base Schema for common fields --- +class BaseSchema(ma.Schema): + """Base schema with common fields like ID and timestamps.""" + # Dump ObjectId as string, read-only + id = fields.Function(lambda obj: str(obj.get("_id")), dump_only=True) + # Dump datetime as ISO 8601 string, read-only + createdAt = fields.DateTime(format='iso', dump_only=True) + updatedAt = fields.DateTime(format='iso', dump_only=True) + +# --- User Schemas (for auth blueprint) --- +class UserRegistrationSchema(ma.Schema): + """Schema for validating user registration input.""" + username = fields.String(required=True, validate=validate.Length(min=3, max=64, error="Username must be between 3 and 64 characters.")) + email = fields.Email(required=True, error="Invalid email format.") # Built-in email validation + password = fields.String(required=True, validate=validate.Length(min=8, error="Password must be at least 8 characters."), load_only=True) # load_only: Input only, never dumped + +class UserLoginSchema(ma.Schema): + """Schema for validating user login input.""" + username = fields.String(required=True) + password = fields.String(required=True, load_only=True) # Input only + +class UserSchema(BaseSchema): + """Schema for serializing user 
data for output (excluding password).""" + username = fields.String(dump_only=True) + email = fields.Email(dump_only=True) + # Inherits id, createdAt, updatedAt from BaseSchema + + class Meta: + # IMPORTANT: Explicitly exclude the password field (even if hashed) from output + exclude = ("password",) + +class UserUpdateSchema(ma.Schema): + """Schema for validating user account update input.""" + username = fields.String(validate=validate.Length(min=3, max=64)) # Optional update + email = fields.Email() # Optional update + password = fields.String(validate=validate.Length(min=8), load_only=True) # Optional update, input only + +# --- API Key Schemas (for api_keys blueprint) --- +ALLOWED_API_PROVIDERS = ["Gemini", "Deepseek", "Chatgpt"] + +class APIKeyCreateSchema(ma.Schema): + """Schema for validating new API key creation input.""" + name = fields.String(required=True, validate=OneOf(ALLOWED_API_PROVIDERS, error=f"Provider name must be one of: {ALLOWED_API_PROVIDERS}")) + key = fields.String(required=True, validate=validate.Length(min=5, error="API Key seems too short.")) # Basic length check + selected = fields.Boolean(load_default=False) # Default to False if not provided on load + +class APIKeyUpdateSchema(ma.Schema): + """Schema for validating API key update input.""" + # All fields are optional for update + name = fields.String(validate=OneOf(ALLOWED_API_PROVIDERS, error=f"Provider name must be one of: {ALLOWED_API_PROVIDERS}")) + key = fields.String(validate=validate.Length(min=5)) + selected = fields.Boolean() + +class APIKeySchema(BaseSchema): + """Schema for serializing API key data for output.""" + # Inherits id, createdAt, updatedAt + uid = fields.Function(lambda obj: str(obj.get("uid")), dump_only=True) # User ID as string + name = fields.String(dump_only=True) + key = fields.String(dump_only=True) # Consider masking part of the key for security: fields.Function(lambda obj: f"{obj.get('key', '')[:4]}...{obj.get('key', '')[-4:]}" if obj.get('key') else None, dump_only=True) + selected = fields.Boolean(dump_only=True) + +# --- Project Schemas (for projects blueprint) --- +class KeywordSchema(ma.Schema): + """Schema for individual keywords within a project or URL.""" + word = fields.String(required=True, validate=_is_alphabetic_or_empty) # Allow empty string or alphabetic + percentage = fields.Float(required=True, validate=validate.Range(min=0, max=100)) + +class ProjectCreateSchema(ma.Schema): + """Schema for validating new project creation input.""" + name = fields.String(required=True, validate=validate.Length(min=1, max=100, error="Project name must be between 1 and 100 characters.")) + topic = fields.String(validate=validate.Length(max=200)) # Optional topic + description = fields.String(validate=validate.Length(max=1000)) # Optional description + +class ProjectUpdateSchema(ma.Schema): + """Schema for validating project update input.""" + # Only allowed fields are optional + name = fields.String(validate=validate.Length(min=1, max=100)) + topic = fields.String(validate=validate.Length(max=200)) + description = fields.String(validate=validate.Length(max=1000)) + collaborators = fields.List(fields.String(validate=_validate_object_id)) # List of user ID strings + keywords = fields.List(fields.Nested(KeywordSchema)) # List of keyword objects + +class ProjectSchema(BaseSchema): + """Schema for serializing detailed project data for output.""" + # Inherits id, createdAt, updatedAt + ownerId = fields.Function(lambda obj: str(obj.get("ownerId")), dump_only=True) + collaborators = 
fields.List(fields.Function(lambda oid: str(oid)), dump_only=True) # List of string IDs + passkey = fields.String(dump_only=True) # Only dump passkey if absolutely necessary, usually not needed in GET responses + name = fields.String(dump_only=True) + topic = fields.String(dump_only=True) + description = fields.String(dump_only=True) + summary = fields.String(dump_only=True) + keywords = fields.List(fields.Nested(KeywordSchema), dump_only=True) + lastActivityBy = fields.Function(lambda obj: str(obj.get("lastActivityBy")) if isinstance(obj.get("lastActivityBy"), ObjectId) else None, dump_only=True) + +class ProjectListSchema(ma.Schema): + """Schema for serializing the summary list of projects.""" + id = fields.Function(lambda obj: str(obj.get("_id")), dump_only=True) + name = fields.String(dump_only=True) + updatedAt = fields.DateTime(format='iso', dump_only=True) + +# --- URL Schemas (for urls blueprint) --- +class URLCreateSchema(ma.Schema): + """Schema for validating new URL creation input.""" + url = fields.URL(required=True, schemes={'http', 'https'}, error="Invalid URL format.") # Validate URL format + +class URLUpdateSchema(ma.Schema): + """Schema for validating URL update input (only specific fields).""" + title = fields.String(validate=validate.Length(max=500)) # Optional update + starred = fields.Boolean() # Optional update + note = fields.String() # Optional update + keywords = fields.List(fields.Nested(KeywordSchema)) # Optional update, validate nested structure + +class URLSchema(BaseSchema): + """Schema for serializing detailed URL data for output.""" + # Inherits id, createdAt, updatedAt + projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True) + url = fields.URL(dump_only=True) + title = fields.String(dump_only=True) + favicon = fields.String(dump_only=True, allow_none=True) + starred = fields.Boolean(dump_only=True) + note = fields.String(dump_only=True) + keywords = fields.List(fields.Nested(KeywordSchema), dump_only=True) + summary = fields.String(dump_only=True) + processingStatus = fields.String(dump_only=True, validate=OneOf(["pending", "processing", "completed", "failed"])) # Optional: validate status + +class URLListSchema(ma.Schema): + """Schema for serializing the simplified list of URLs.""" + id = fields.Function(lambda obj: str(obj.get("_id")), dump_only=True) + title = fields.String(dump_only=True) + url = fields.URL(dump_only=True) + +class URLSearchResultSchema(URLListSchema): + """Schema for search results (same as list for now).""" + pass # Inherits fields from URLListSchema + +# --- Activity Schemas (for activity blueprint) --- +class ActivityCreateSchema(ma.Schema): + """Schema for validating new activity log creation.""" + projectId = fields.String(required=True, validate=_validate_object_id) # Validate as ObjectId string + activityType = fields.String(required=True, validate=validate.Length(min=1)) + message = fields.String(load_default="") # Optional message + +class ActivitySchema(BaseSchema): + """Schema for serializing activity log data.""" + # Inherits id, createdAt + # Note: updatedAt is not typically used for immutable logs + projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True) + userId = fields.Function(lambda obj: str(obj.get("userId")), dump_only=True) + activityType = fields.String(dump_only=True) + message = fields.String(dump_only=True) + +# --- Dialog Schemas (for dialog blueprint) --- +class MessageSchema(ma.Schema): + """Schema for individual messages within a dialog.""" + role = 
fields.String(required=True, validate=OneOf(["user", "system"], error="Role must be 'user' or 'system'.")) + content = fields.String(required=True) + timestamp = fields.DateTime(format='iso', dump_only=True) # Only dump timestamp + +class DialogCreateSchema(ma.Schema): + """Schema for validating new dialog session creation.""" + projectId = fields.String(required=True, validate=_validate_object_id) + sessionId = fields.String() # Optional custom session ID + startMessage = fields.String() # Optional initial message + +class DialogSendMessageSchema(ma.Schema): + """Schema for validating user message input when sending to dialog.""" + content = fields.String(required=True, validate=validate.Length(min=1, error="Message content cannot be empty.")) + +class DialogSchema(BaseSchema): + """Schema for serializing detailed dialog session data (including messages).""" + # Inherits id + uid = fields.Function(lambda obj: str(obj.get("uid")), dump_only=True) + projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True) + provider = fields.String(dump_only=True) + sessionId = fields.String(dump_only=True) # Dump custom session ID if present + sessionStartedAt = fields.DateTime(format='iso', dump_only=True) + sessionEndedAt = fields.DateTime(format='iso', dump_only=True, allow_none=True) # Can be null + messages = fields.List(fields.Nested(MessageSchema), dump_only=True) # Nested list of messages + +class DialogSummarySchema(BaseSchema): + """Schema for serializing dialog session list (excluding messages).""" + # Inherits id + uid = fields.Function(lambda obj: str(obj.get("uid")), dump_only=True) + projectId = fields.Function(lambda obj: str(obj.get("projectId")), dump_only=True) + provider = fields.String(dump_only=True) + sessionId = fields.String(dump_only=True) + sessionStartedAt = fields.DateTime(format='iso', dump_only=True) + sessionEndedAt = fields.DateTime(format='iso', dump_only=True, allow_none=True) + + class Meta: + # Exclude the potentially large messages array for list views + exclude = ("messages",) + diff --git a/backend_flask/myapp/urls/__init__.py b/backend_flask/myapp/urls/__init__.py new file mode 100644 index 0000000..387a1ba --- /dev/null +++ b/backend_flask/myapp/urls/__init__.py @@ -0,0 +1,14 @@ +# myapp/urls/__init__.py + +from flask import Blueprint + +# Define the Blueprint instance for the URL management module. +# 'urls' is the unique name for this blueprint. +# url_prefix='/api' will be prepended to all routes defined in this blueprint. +# Specific paths like '/projects//urls' or '/urls/' will be defined in routes.py. +bp = Blueprint('urls', __name__, url_prefix='/api') + +# Import the routes module. +# This connects the routes defined in routes.py to the 'bp' instance. +# This import MUST come AFTER the Blueprint 'bp' is defined. +from . import urls_routes diff --git a/backend_flask/myapp/urls/urls_routes.py b/backend_flask/myapp/urls/urls_routes.py new file mode 100644 index 0000000..df60ae7 --- /dev/null +++ b/backend_flask/myapp/urls/urls_routes.py @@ -0,0 +1,817 @@ +# myapp/urls/urls_routes.py + +import datetime +import logging +from flask import request, jsonify, current_app, has_app_context # Flask utilities +from bson.objectid import ObjectId, InvalidId # For MongoDB ObjectIds +from collections import defaultdict # Potentially useful for keyword aggregation etc. +from functools import wraps # For creating dummy decorators +import re # For escaping regex characters in search + +# --- Local Blueprint Import --- +from . 
import bp # Import the 'bp' instance defined in the local __init__.py + + # --- Shared Extensions and Utilities Imports --- + try: + from ..extensions import mongo # Import the initialized PyMongo instance + from ..utils import token_required # Import the authentication decorator + except ImportError: + # Fallback or error handling if imports fail + print("Warning: Could not import mongo or token_required in urls/urls_routes.py.") + mongo = None + # Define a dummy decorator if token_required is missing + def token_required(f): + @wraps(f) + def wrapper(*args, **kwargs): + print("ERROR: token_required decorator is not available!") + return jsonify({"message": "Server configuration error: Missing authentication utility."}), 500 + return wrapper + + # --- Schema Imports --- + try: + # Import the relevant schemas defined in schemas.py + from ..schemas import ( + URLCreateSchema, URLUpdateSchema, URLSchema, + URLListSchema, URLSearchResultSchema + ) + from marshmallow import ValidationError + except ImportError: + print("Warning: Could not import URL schemas or ValidationError in urls/urls_routes.py.") + URLCreateSchema = None + URLUpdateSchema = None + URLSchema = None + URLListSchema = None + URLSearchResultSchema = None + ValidationError = None + + # --- Celery Task Imports --- + # IMPORTANT: Assumes the project root directory ('your_fullstack_project/') is in PYTHONPATH + try: + from backend_flask.celery_worker.celery_app import async_extract_title_and_keywords, async_summarize_url, async_recalc_project_keywords + except ModuleNotFoundError: + print("Warning: Could not import Celery tasks from 'celery_worker'. Ensure project root is in PYTHONPATH.") + # Define dummy task functions to prevent NameError if Celery isn't set up + def _dummy_celery_task(*args, **kwargs): + task_name = args[0] if args else 'dummy_task' + print(f"ERROR: Celery task {task_name} not available!") + class DummyTask: + def __init__(self, name): + self.__name__ = name + def delay(self, *a, **kw): + print(f"ERROR: Tried to call delay() on dummy task {self.__name__}") + pass + return DummyTask(task_name) + + async_extract_title_and_keywords = _dummy_celery_task('async_extract_title_and_keywords') + async_summarize_url = _dummy_celery_task('async_summarize_url') + async_recalc_project_keywords = _dummy_celery_task('async_recalc_project_keywords') + + + # --- Helper to get logger safely --- + def _get_logger(): + if has_app_context(): + return current_app.logger + return logging.getLogger(__name__) + + # Note: Routes use paths relative to the '/api' prefix defined in __init__.py. + + # -------------------------- + # Create URL Endpoint + # Path: POST /api/projects/<project_id>/urls + # -------------------------- + @bp.route('/projects/<project_id>/urls', methods=['POST']) + @token_required + def create_url(current_user, project_id): + """ + Create a new URL entry within a specific project. + Uses URLCreateSchema for input validation. + Expects 'url' and optional fields in JSON payload. + Verifies project access for the authenticated user. + Triggers background Celery tasks for title/keyword extraction and summarization.
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + logger.error("Invalid current_user object received in create_url") + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + user_id_str = str(user_id) # Keep string version for Celery tasks + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in create_url: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not URLCreateSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + logger.debug(f"create_url called: project_id={project_id}, data={json_data}, user_id={user_id_str}") + schema = URLCreateSchema() + try: + # Validate only the required 'url' field initially + validated_input = schema.load(json_data) + except ValidationError as err: + logger.warning(f"Create URL validation failed: {err.messages}") + return jsonify(err.messages), 422 + + user_url = validated_input['url'] # URL is guaranteed by schema + + try: + # Validate project ID format from URL path + try: + project_obj_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format in URL path."}), 400 + + # --- Verify Project Access --- + db = mongo.db + project = db.projects.find_one({"_id": project_obj_id}, {"ownerId": 1, "collaborators": 1}) + if not project: + return jsonify({"message": "Project not found."}), 404 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_obj_id} is missing ownerId field.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to add URLs to this project."}), 403 + + # --- Prepare URL Document (using raw request JSON for optional fields) --- + # Optional fields are taken directly from the raw request JSON (json_data), not the schema output + keywords_data = json_data.get("keywords", []) # Process keywords manually as before + keywords_converted = [] + if isinstance(keywords_data, list): + for kw in keywords_data: + if isinstance(kw, dict): + word = kw.get("word", "").strip() + if word: + try: + percentage = float(kw.get("percentage", 0.0)) + keywords_converted.append({"word": word, "percentage": percentage}) + except (ValueError, TypeError): + logger.warning(f"Could not convert keyword percentage for word '{word}' during URL creation.") + else: + logger.warning("Non-dict item found in keywords during URL creation.") + + now = datetime.datetime.now(datetime.timezone.utc) + url_doc = { + "projectId": project_obj_id, + "url": user_url, # Use validated URL + "title": json_data.get("title", "").strip(), + "favicon": json_data.get("favicon", ""), + "starred": bool(json_data.get("starred", False)), + "note": json_data.get("note", "").strip(), + "keywords": keywords_converted, + "summary": json_data.get("summary", "").strip(), + "processingStatus": "pending", + "createdAt": now, + "updatedAt": now + } + + # Insert the new URL document + result = db.urls.insert_one(url_doc) + new_url_id_str = str(result.inserted_id) + logger.info(f"Successfully inserted URL {new_url_id_str} for project {project_id}") + + # ---
Trigger Background Tasks --- + tasks_queued = True + try: + async_extract_title_and_keywords.delay(new_url_id_str, user_id_str) + api_doc = db.api_list.find_one({"uid": user_id, "selected": True, "name": "Gemini"}) + use_gemini = bool(api_doc and api_doc.get("key")) + async_summarize_url.delay(new_url_id_str, user_id_str, use_gemini) + logger.info(f"Queued Celery tasks for URL {new_url_id_str} (use_gemini={use_gemini})") + except NameError as ne: + logger.error(f"Celery tasks not available for URL {new_url_id_str}: {ne}. Processing cannot be initiated.") + tasks_queued = False + except Exception as celery_err: + logger.error(f"Failed to queue Celery tasks for URL {new_url_id_str}: {celery_err}", exc_info=True) + tasks_queued = False + + response_message = "URL created successfully and processing initiated." if tasks_queued else "URL created, but failed to initiate background processing." + return jsonify({"message": response_message, "url_id": new_url_id_str}), 201 + + except Exception as e: + logger.error(f"Error creating URL for project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An internal error occurred while creating the URL."}), 500 + + +# -------------------------- +# List URLs for Project (Simplified) +# Path: GET /api/projects//urls +# -------------------------- +@bp.route('/projects//urls', methods=['GET']) +@token_required +def list_urls_for_project(current_user, project_id): + """ + Retrieve a simplified list (id, title, url) of all URLs within a specific project. + Uses URLListSchema for output serialization. + Verifies user access to the project. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in list_urls_for_project: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not URLListSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Validate project ID format from URL path + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format in URL path."}), 400 + + db = mongo.db + # --- Verify Project Access --- + project = db.projects.find_one( + {"_id": obj_project_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + if not project: + return jsonify({"message": "Project not found."}), 404 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to this project's URLs."}), 403 + + # --- Fetch and Serialize URLs --- + # Find URLs for the project, projecting only fields needed by schema + cursor = db.urls.find( + {"projectId": obj_project_id}, + {"_id": 1, "title": 1, "url": 1} # Projection matching URLListSchema + ).sort("updatedAt", -1) # Sort by most recently updated + + url_docs = list(cursor) # Convert cursor to list + + # Serialize using the schema + output_schema = URLListSchema(many=True) + serialized_result = output_schema.dump(url_docs) + + # Return the serialized list of URLs + return jsonify({"urls": serialized_result}), 200 + + except Exception as e: + 
logger.error(f"Error listing URLs for project {project_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while listing URLs."}), 500 + + +# -------------------------- +# Get URL Detail +# Path: GET /api/urls/ +# -------------------------- +@bp.route('/urls/', methods=['GET']) +@token_required +def get_url_detail(current_user, url_id): + """ + Retrieve the full details for a specific URL entry by its ID. + Uses URLSchema for output serialization. + Verifies user access via the associated project. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in get_url_detail: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not URLSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Validate URL ID format from URL path + try: + obj_url_id = ObjectId(url_id) + except InvalidId: + return jsonify({"message": "Invalid URL ID format."}), 400 + + db = mongo.db + # Find the URL document + url_doc = db.urls.find_one({"_id": obj_url_id}) + if not url_doc: + return jsonify({"message": "URL not found."}), 404 + + # --- Verify Project Access --- + project_obj_id = url_doc.get("projectId") + if not project_obj_id or not isinstance(project_obj_id, ObjectId): + logger.error(f"URL {url_id} has missing or invalid projectId.") + return jsonify({"message": "URL data integrity issue (missing project link)."}), 500 + + project = db.projects.find_one( + {"_id": project_obj_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + if not project: + logger.error(f"Project {project_obj_id} associated with URL {url_id} not found.") + return jsonify({"message": "Associated project not found; cannot verify access."}), 404 # Or 500 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_obj_id} is missing ownerId in get_url_detail.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to the project containing this URL."}), 403 + + # --- Serialize and Return URL Details --- + output_schema = URLSchema() + # Schema handles ObjectId, datetime conversion, and field selection + serialized_result = output_schema.dump(url_doc) + + return jsonify(serialized_result), 200 + + except Exception as e: + logger.error(f"Error retrieving URL detail for {url_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while retrieving the URL details."}), 500 + + +# -------------------------- +# Update URL +# Path: PUT /api/urls/ +# -------------------------- +@bp.route('/urls/', methods=['PUT']) +@token_required +def update_url(current_user, url_id): + """ + Update specific fields of a URL entry (title, starred, note, keywords). + Uses URLUpdateSchema for input validation. + Verifies user access via the associated project. + Triggers project keyword recalculation if keywords are changed. + Returns simplified updated URL info using URLListSchema. 
+ """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in update_url: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not URLUpdateSchema or not URLListSchema or not ValidationError: + return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + # Get and validate JSON data using the schema + json_data = request.get_json() or {} + update_schema = URLUpdateSchema() + try: + # Load validates only the allowed fields (title, starred, note, keywords) + validated_data = update_schema.load(json_data) + except ValidationError as err: + logger.warning(f"Update URL validation failed: {err.messages}") + return jsonify(err.messages), 422 + + # If validation passed but no valid fields were provided + if not validated_data: + return jsonify({"message": "No valid fields provided for update."}), 400 + + try: + # Validate URL ID format + try: + obj_url_id = ObjectId(url_id) + except InvalidId: + return jsonify({"message": "Invalid URL ID format."}), 400 + + db = mongo.db + # --- Find URL and Verify Access --- + # Fetch projectId needed for access check + url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1}) + if not url_doc: + return jsonify({"message": "URL not found."}), 404 + + project_obj_id = url_doc.get("projectId") + if not project_obj_id or not isinstance(project_obj_id, ObjectId): + logger.error(f"URL {url_id} has missing or invalid projectId during update.") + return jsonify({"message": "URL data integrity issue (missing project link)."}), 500 + + project = db.projects.find_one( + {"_id": project_obj_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + if not project: + logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during update.") + return jsonify({"message": "Associated project not found; cannot verify access."}), 404 # Or 500 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_obj_id} is missing ownerId during URL update.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to update URLs in this project."}), 403 + + # --- Prepare Update Fields from validated data --- + update_fields = validated_data # Use the validated dictionary directly + keywords_changed = "keywords" in update_fields # Check if keywords were part of the update + + # Always update the 'updatedAt' timestamp + update_fields["updatedAt"] = datetime.datetime.now(datetime.timezone.utc) + + # --- Perform Update --- + result = db.urls.update_one({"_id": obj_url_id}, {"$set": update_fields}) + + # --- Return Response --- + if result.matched_count == 1: + # Retrieve the updated URL doc to return simplified info + updated_url_doc = db.urls.find_one( + {"_id": obj_url_id}, + {"_id": 1, "title": 1, "url": 1} # Projection for list schema + ) + if updated_url_doc: + # Serialize using the list schema for consistency + output_schema = URLListSchema() + serialized_url = output_schema.dump(updated_url_doc) + + # Trigger keyword 
recalc for the project in background if keywords changed + if keywords_changed: + try: + async_recalc_project_keywords.delay(str(project_obj_id), str(user_id)) + logger.info(f"Queued keyword recalc task for project {project_obj_id} after URL {url_id} update.") + except NameError: + logger.error("Celery task 'async_recalc_project_keywords' not available during URL update.") + except Exception as celery_err: + logger.error(f"Failed to queue Celery recalc task for project {project_obj_id} after URL update: {celery_err}", exc_info=True) + + return jsonify({"message": "URL updated successfully.", "url": serialized_url}), 200 + else: + logger.warning(f"URL {url_id} updated but could not be retrieved.") + return jsonify({"message": "URL updated successfully, but failed to retrieve updated data."}), 200 + else: + # Matched count was 0 + return jsonify({"message": "URL update failed (document not found)."}), 404 + + except Exception as e: + logger.error(f"Error updating URL {url_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while updating the URL."}), 500 + + +# -------------------------- +# Delete URL +# Path: DELETE /api/urls/ +# -------------------------- +@bp.route('/urls/', methods=['DELETE']) +@token_required +def delete_url(current_user, url_id): + """ + Delete a specific URL entry by its ID. + Verifies user access via the associated project. + Triggers project keyword recalculation after deletion. + (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in delete_url: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check DB connection + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate URL ID format + try: + obj_url_id = ObjectId(url_id) + except InvalidId: + return jsonify({"message": "Invalid URL ID format."}), 400 + + db = mongo.db + # --- Find URL and Verify Access --- + # Fetch projectId needed for access check and recalc trigger + url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1}) + if not url_doc: + return jsonify({"message": "URL not found."}), 404 + + project_obj_id = url_doc.get("projectId") + if not project_obj_id or not isinstance(project_obj_id, ObjectId): + logger.error(f"URL {url_id} has missing or invalid projectId during delete.") + return jsonify({"message": "URL data integrity issue (missing project link)."}), 500 + + project = db.projects.find_one( + {"_id": project_obj_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + # If associated project is missing, we cannot verify access, deny deletion. 
+ if not project: + logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during delete.") + return jsonify({"message": "Cannot verify access; associated project missing."}), 403 # Deny access + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_obj_id} is missing ownerId during URL delete.") + return jsonify({"message": "Project data integrity issue."}), 500 + # Check if user has access rights (owner or collaborator) + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have permission to delete URLs in this project."}), 403 + + # --- Perform Deletion --- + delete_result = db.urls.delete_one({"_id": obj_url_id}) + + # --- Return Response --- + if delete_result.deleted_count == 1: + # Trigger keyword recalc for the project in background after successful URL deletion + try: + async_recalc_project_keywords.delay(str(project_obj_id), str(user_id)) + logger.info(f"Queued keyword recalc task for project {project_obj_id} after URL {url_id} deletion.") + except NameError: + logger.error("Celery task 'async_recalc_project_keywords' not available during URL deletion.") + except Exception as celery_err: + logger.error(f"Failed to queue Celery recalc task for project {project_obj_id} after URL deletion: {celery_err}", exc_info=True) + # Still return success for the deletion itself + + return jsonify({"message": "URL deleted successfully."}), 200 # 200 OK or 204 No Content + else: + # Document existed (find_one succeeded) but delete failed + logger.error(f"URL {obj_url_id} found but delete_one failed (deleted_count=0).") + return jsonify({"message": "Failed to delete URL (already deleted?)."}), 404 # Or 500 + + except Exception as e: + logger.error(f"Error deleting URL {url_id}: {e}", exc_info=True) + return jsonify({"message": "An error occurred while deleting the URL."}), 500 + + +# -------------------------- +# Celery Task Trigger Endpoints +# Path: PUT /api/urls//extract_title_and_keywords +# Path: PUT /api/urls//summarize +# -------------------------- +@bp.route('/urls//extract_title_and_keywords', methods=['PUT']) +@token_required +def trigger_extract_title_and_keywords(current_user, url_id): + """ + Manually triggers the background task for extracting title and keywords for a URL. + Verifies user access via the associated project. + Sets processingStatus to 'pending'. 
+ (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + user_id_str = str(user_id) # Keep string version for Celery task + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in trigger_extract: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check DB connection + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate URL ID format + try: + obj_url_id = ObjectId(url_id) + except InvalidId: + return jsonify({"message": "Invalid URL ID format."}), 400 + + db = mongo.db + # --- Find URL and Verify Access --- + url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1}) + if not url_doc: + return jsonify({"message": "URL not found."}), 404 + + project_obj_id = url_doc.get("projectId") + if not project_obj_id or not isinstance(project_obj_id, ObjectId): + logger.error(f"URL {url_id} has missing or invalid projectId during trigger_extract.") + return jsonify({"message": "URL data integrity issue (missing project link)."}), 500 + + project = db.projects.find_one( + {"_id": project_obj_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + if not project: + logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during trigger_extract.") + return jsonify({"message": "Associated project not found; cannot verify access."}), 404 # Or 500 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_obj_id} is missing ownerId during trigger_extract.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to trigger processing for this URL."}), 403 + + # --- Update Status and Queue Task --- + # Set status to pending before queueing + db.urls.update_one({"_id": obj_url_id}, + {"$set": {"processingStatus": "pending", "updatedAt": datetime.datetime.now(datetime.timezone.utc)}}) + + try: + # Queue the Celery task + async_extract_title_and_keywords.delay(url_id, user_id_str) + logger.info(f"Queued title/keyword extraction task for URL {url_id}") + return jsonify({"message": "Title and keyword extraction task queued successfully."}), 202 # 202 Accepted + except NameError: + logger.error("Celery task 'async_extract_title_and_keywords' is not defined or imported correctly.") + # Revert status? Or leave as pending with error? Let's leave as pending. + return jsonify({"message": "Server configuration error: Extraction feature unavailable."}), 500 + except Exception as e: + logger.error(f"Error queueing extraction task for URL {url_id}: {e}", exc_info=True) + # Revert status? Or leave as pending with error? Let's leave as pending. 
+ return jsonify({"message": "An error occurred while queueing the extraction task."}), 500 + + except Exception as e: + logger.error(f"Error in trigger_extract_title_and_keywords endpoint for URL {url_id}: {e}", exc_info=True) + return jsonify({"message": "An internal error occurred before queueing the task."}), 500 + + +@bp.route('/urls//summarize', methods=['PUT']) +@token_required +def trigger_summarize_url(current_user, url_id): + """ + Manually triggers the background task for summarizing a URL. + Verifies user access via the associated project. + Determines whether to use Gemini based on user's selected API key. + Sets processingStatus to 'pending'. + (No schema needed for input/output here) + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + user_id_str = str(user_id) # Keep string version for Celery task + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in trigger_summarize: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check DB connection + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + + try: + # Validate URL ID format + try: + obj_url_id = ObjectId(url_id) + except InvalidId: + return jsonify({"message": "Invalid URL ID format."}), 400 + + db = mongo.db + # --- Find URL and Verify Access --- + url_doc = db.urls.find_one({"_id": obj_url_id}, {"projectId": 1}) + if not url_doc: + return jsonify({"message": "URL not found."}), 404 + + project_obj_id = url_doc.get("projectId") + if not project_obj_id or not isinstance(project_obj_id, ObjectId): + logger.error(f"URL {url_id} has missing or invalid projectId during trigger_summarize.") + return jsonify({"message": "URL data integrity issue (missing project link)."}), 500 + + project = db.projects.find_one( + {"_id": project_obj_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + if not project: + logger.error(f"Project {project_obj_id} associated with URL {url_id} not found during trigger_summarize.") + return jsonify({"message": "Associated project not found; cannot verify access."}), 404 # Or 500 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if not owner_id: + logger.error(f"Project {project_obj_id} is missing ownerId during trigger_summarize.") + return jsonify({"message": "Project data integrity issue."}), 500 + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "You do not have access to trigger processing for this URL."}), 403 + + # --- Update Status, Check API Key, and Queue Task --- + # Set status to pending before queueing + db.urls.update_one({"_id": obj_url_id}, + {"$set": {"processingStatus": "pending", "updatedAt": datetime.datetime.now(datetime.timezone.utc)}}) + + # Check for user's selected Gemini API key + api_doc = db.api_list.find_one({"uid": user_id, "selected": True, "name": "Gemini"}) + use_gemini = bool(api_doc and api_doc.get("key")) # True if Gemini selected and key exists + + try: + # Queue the Celery task, passing the use_gemini flag + async_summarize_url.delay(url_id, user_id_str, use_gemini) + logger.info(f"Queued summarization task for URL {url_id} (use_gemini={use_gemini})") + return jsonify({"message": "Summarization task queued successfully."}), 202 # 202 Accepted + except NameError: + logger.error("Celery 
task 'async_summarize_url' is not defined or imported correctly.") + # Revert status? Or leave as pending? Leave as pending. + return jsonify({"message": "Server configuration error: Summarization feature unavailable."}), 500 + except Exception as e: + logger.error(f"Error queueing summarization task for URL {url_id}: {e}", exc_info=True) + # Revert status? Or leave as pending? Leave as pending. + return jsonify({"message": "An error occurred while queueing the summarization task."}), 500 + + except Exception as e: + logger.error(f"Error in trigger_summarize_url endpoint for URL {url_id}: {e}", exc_info=True) + return jsonify({"message": "An internal error occurred before queueing the task."}), 500 + + +# -------------------------- +# Search URLs within Project +# Path: GET /api/projects//search?q=... +# -------------------------- +@bp.route('/projects//search', methods=['GET']) +@token_required +def search_urls(current_user, project_id): + """ + Search for URLs within a specific project based on a query string. + Uses URLSearchResultSchema for output serialization. + Searches 'title', 'note', 'keywords.word', and 'summary' fields using regex. + Returns a simplified list (id, title, url) of matching URLs. + Verifies user access to the project. + """ + logger = _get_logger() + # Validate user object from token + if not current_user or not current_user.get("_id"): + return jsonify({"message": "Internal authorization error."}), 500 + try: + user_id = ObjectId(current_user["_id"]) + except (InvalidId, TypeError) as e: + logger.error(f"User ID conversion error in search_urls: {e}") + return jsonify({"message": "Invalid user ID format in token."}), 400 + + # Check dependencies + if not mongo: return jsonify({"message": "Database connection not available."}), 500 + if not URLSearchResultSchema: return jsonify({"message": "Server configuration error: Schema unavailable."}), 500 + + try: + # Get search query string from query parameters + query_str = request.args.get("q", "").strip() + # If query string is empty, return empty results immediately + if not query_str: + return jsonify({"results": []}), 200 + + # Validate project ID format from URL path + try: + obj_project_id = ObjectId(project_id) + except InvalidId: + return jsonify({"message": "Invalid project ID format in URL path."}), 400 + + db = mongo.db + # --- Verify Project Access --- + project = db.projects.find_one( + {"_id": obj_project_id}, + {"ownerId": 1, "collaborators": 1} # Projection for access check + ) + if not project: + return jsonify({"message": "Project not found."}), 404 + + owner_id = project.get("ownerId") + collaborators = project.get("collaborators", []) + if owner_id != user_id and user_id not in collaborators: + return jsonify({"message": "Access denied to search URLs in this project."}), 403 + + # --- Perform Search using Aggregation Pipeline --- + # Escape regex special characters in the query string for safety + escaped_query = re.escape(query_str) + search_pipeline = [ + {"$match": {"projectId": obj_project_id}}, + {"$match": { + "$or": [ + {"title": {"$regex": escaped_query, "$options": "i"}}, + {"note": {"$regex": escaped_query, "$options": "i"}}, + {"keywords.word": {"$regex": escaped_query, "$options": "i"}}, + {"summary": {"$regex": escaped_query, "$options": "i"}} + ] + }}, + # Project only fields needed by the output schema + {"$project": {"_id": 1, "title": 1, "url": 1, "updatedAt": 1}}, + {"$sort": {"updatedAt": -1}} # Sort by update time + # Add $limit stage if needed + ] + + # Execute the aggregation 
pipeline + results_cursor = db.urls.aggregate(search_pipeline) + search_result_docs = list(results_cursor) # Convert cursor to list + + # --- Serialize results using the schema --- + output_schema = URLSearchResultSchema(many=True) + # Schema handles ObjectId conversion and field selection + serialized_result = output_schema.dump(search_result_docs) + + # Return the search results + return jsonify({"results": serialized_result}), 200 + + except Exception as e: + logger.error(f"Error searching URLs in project {project_id} with query '{query_str}': {e}", exc_info=True) + return jsonify({"message": "An error occurred during URL search."}), 500 + diff --git a/backend_flask/myapp/utils.py b/backend_flask/myapp/utils.py new file mode 100644 index 0000000..53ca49b --- /dev/null +++ b/backend_flask/myapp/utils.py @@ -0,0 +1,93 @@ +# utils/auth.py (or wherever token_required is defined) + +from functools import wraps +import secrets +import jwt +from flask import request, jsonify, current_app # <-- Import current_app +# Config might still be needed for default algorithm if not in app.config +# from backend.config import Config # Keep if needed for defaults, but prefer current_app.config + +# TODO Flask cannot find config inside the utils +from .config import Config # Example if config.py is in the same dir +from bson.objectid import ObjectId +# Remove direct import of mongo + + +def token_required(f): + """ + Decorator to ensure a valid JWT token is present in the request header + and injects the corresponding user document into the decorated function. + """ + @wraps(f) + def decorated(*args, **kwargs): + token = None + auth_header = request.headers.get("Authorization") + if auth_header: + # Check for "Bearer " prefix and extract token + parts = auth_header.split() + if len(parts) == 2 and parts[0].lower() == "bearer": + token = parts[1] + # Optional: Allow raw token directly (as in original code) + elif len(parts) == 1: + token = auth_header + + if not token: + return jsonify({"message": "Token is missing."}), 401 + + try: + # Use current_app.config to access SECRET_KEY and JWT_ALGORITHM + secret_key = current_app.config['SECRET_KEY'] + # Provide a default algorithm if not explicitly configured + algorithm = current_app.config.get('JWT_ALGORITHM', Config.JWT_ALGORITHM or 'HS256') + + # Decode the token + data = jwt.decode(token, secret_key, algorithms=[algorithm]) + + # --- Use current_app to access mongo --- + user_id_str = data.get("user_id") + if not user_id_str: + return jsonify({"message": "Token payload missing user_id."}), 401 + + # Access the 'users' collection via the mongo instance attached to current_app + current_user_doc = current_app.mongo.db.users.find_one({"_id": ObjectId(user_id_str)}) + # --- End database access change --- + + if not current_user_doc: + # Even if token is valid, user might have been deleted + return jsonify({"message": "User associated with token not found."}), 401 + + # Convert ObjectId back to string for consistency if needed, + # or pass the whole document as is. Passing document is often useful. 
+ # current_user_doc['_id'] = str(current_user_doc['_id']) # Optional conversion + + except jwt.ExpiredSignatureError: + # Specific error for expired token + return jsonify({"message": "Token has expired."}), 401 + except jwt.InvalidTokenError as e: + # Specific error for other JWT validation issues + current_app.logger.warning(f"Invalid token encountered: {e}") # Log the specific error + return jsonify({"message": "Token is invalid."}), 401 + except Exception as e: + # Catch other potential errors (e.g., ObjectId conversion, DB connection issues) + current_app.logger.error(f"Error during token verification: {e}", exc_info=True) + # Return a more generic message for unexpected + return jsonify({"message": "Token verification failed."}), 401 + + # Inject the fetched user document into the decorated function + return f(current_user_doc, *args, **kwargs) + + return decorated + + +# This is a placeholder for background task functions. +# For example, you could use Celery to process URLs asynchronously. +def process_url(url_id): + # Retrieve URL document by url_id, perform scraping, summarization, and update processingStatus. + # This function should be called by a background worker. + pass + + +# This function will generate a pass key for frontend-backend communication +def generate_passkey(): + return secrets.token_hex(16) + diff --git a/backend_flask/requirements.txt b/backend_flask/requirements.txt new file mode 100644 index 0000000..ceeddd3 --- /dev/null +++ b/backend_flask/requirements.txt @@ -0,0 +1,104 @@ +aiohappyeyeballs==2.6.1 +aiohttp==3.11.16 +aiosignal==1.3.2 +amqp==5.3.1 +annotated-types==0.7.0 +anyio==4.9.0 +attrs==25.3.0 +backend==0.2.4.1 +beautifulsoup4==4.13.3 +billiard==4.2.1 +blinker==1.9.0 +cachetools==5.5.2 +celery==5.5.1 +certifi==2025.1.31 +charset-normalizer==3.4.1 +click==8.1.8 +click-didyoumean==0.3.1 +click-plugins==1.1.1 +click-repl==0.3.0 +dataclasses-json==0.6.7 +dnspython==2.7.0 +faiss-cpu==1.10.0 +Flask==3.1.0 +flask-cors==5.0.1 +Flask-JWT-Extended==4.7.1 +flask-marshmallow==1.3.0 +Flask-PyMongo==2.3.0 +frozenlist==1.5.0 +google-ai-generativelanguage==0.6.15 +google-api-core==2.24.2 +google-api-python-client==2.166.0 +google-auth==2.38.0 +google-auth-httplib2==0.2.0 +google-generativeai==0.8.4 +googleapis-common-protos==1.69.2 +greenlet==3.1.1 +grpcio==1.71.0 +grpcio-status==1.71.0 +h11==0.14.0 +httpcore==1.0.8 +httplib2==0.22.0 +httpx==0.28.1 +httpx-sse==0.4.0 +idna==3.10 +importlib-metadata==4.13.0 +iniconfig==2.1.0 +itsdangerous==2.2.0 +Jinja2==3.1.6 +jsonpatch==1.33 +jsonpointer==3.0.0 +kombu==5.5.2 +langchain==0.3.23 +langchain-community==0.3.21 +langchain-core==0.3.51 +langchain-text-splitters==0.3.8 +langsmith==0.3.30 +MarkupSafe==3.0.2 +marshmallow==3.26.1 +multidict==6.4.3 +mypy-extensions==1.0.0 +numpy==1.26.4 +orjson==3.10.16 +packaging==24.2 +pluggy==1.5.0 +prompt_toolkit==3.0.50 +propcache==0.3.1 +proto-plus==1.26.1 +protobuf==5.29.4 +pyasn1==0.6.1 +pyasn1_modules==0.4.2 +pydantic==2.11.3 +pydantic-settings==2.8.1 +pydantic_core==2.33.1 +PyJWT==2.10.1 +pymongo==4.12.0 +pyparsing==3.2.3 +pytest==8.3.5 +python-dateutil==2.9.0.post0 +python-dotenv==1.1.0 +PyYAML==6.0.2 +redis==5.2.1 +repoze.lru==0.7 +requests==2.32.3 +requests-toolbelt==1.0.0 +Routes==2.5.1 +rsa==4.9 +six==1.17.0 +sniffio==1.3.1 +soupsieve==2.6 +SQLAlchemy==2.0.40 +tenacity==9.1.2 +tqdm==4.67.1 +typing-inspect==0.9.0 +typing-inspection==0.4.0 +typing_extensions==4.13.2 +tzdata==2025.2 +uritemplate==4.1.1 +urllib3==2.4.0 +vine==5.1.0 +wcwidth==0.2.13 +Werkzeug==3.1.3 +yarl==1.19.0 
+zipp==3.21.0 +zstandard==0.23.0 diff --git a/backend_flask/run.py b/backend_flask/run.py new file mode 100644 index 0000000..87bae48 --- /dev/null +++ b/backend_flask/run.py @@ -0,0 +1,32 @@ +# backend/run.py + + +import os +from myapp import create_app # Import the factory function + +# Determine the configuration to use (e.g., from environment variable) +# Default to 'development' if FLASK_CONFIG is not set +config_name = os.environ.get('FLASK_CONFIG', 'development') + +# Create the Flask app instance using the factory +app = create_app(config_name) + +# Run the development server +if __name__ == "__main__": + # Get host and port from environment variables or use defaults + host = os.environ.get('FLASK_RUN_HOST', '0.0.0.0') + try: + port = int(os.environ.get('FLASK_RUN_PORT', '5000')) + except ValueError: + port = 5000 + + # Use Flask's built-in server for development. + # Debug mode should be controlled by the configuration loaded in create_app. + # app.run() will use app.config['DEBUG'] automatically. + print(f"Starting Flask server on {host}:{port} with config '{config_name}'...") + app.run(host=host, port=port) + +# For production, you would typically use a WSGI server like Gunicorn or uWSGI: +# Example: gunicorn -w 4 -b 0.0.0.0:5000 "run:create_app('production')" + + diff --git a/backend_flask/test_auth_init.py b/backend_flask/test_auth_init.py new file mode 100644 index 0000000..a90e408 --- /dev/null +++ b/backend_flask/test_auth_init.py @@ -0,0 +1,51 @@ +# backend/test_auth_init.py +# Purpose: Directly test the core logic of myapp/auth/__init__.py + +import sys +import os +import traceback + +print("--- Starting test_auth_init.py ---") + +# --- Setup Path --- +# Get the absolute path of the directory containing this script (backend/) +current_dir = os.path.dirname(os.path.abspath(__file__)) +# Get the project root directory (SurfSmart/) +project_root = os.path.dirname(current_dir) +# Add project root to sys.path to allow 'import backend.myapp.auth' later if needed +# and potentially allow flask.Blueprint to resolve correctly if there are path issues. 
+if project_root not in sys.path: + sys.path.insert(0, project_root) + print(f"Test script added project root: {project_root}") +print(f"Test script current sys.path: {sys.path}") +print(f"Test script current working directory: {os.getcwd()}") + +# --- Test Core Logic --- +bp_instance = None # Initialize to None +try: + print("\nAttempting: from flask import Blueprint") + from flask import Blueprint + print("Successfully imported Blueprint") + + print("\nAttempting: bp = Blueprint('auth', __name__, url_prefix='/api/auth')") + # Use a different variable name just in case 'bp' has weird conflicts + test_bp = Blueprint('auth', __name__, url_prefix='/api/auth') + bp_instance = test_bp # Assign to check later + print(f"Successfully instantiated Blueprint: {test_bp}") + print(f"Type of test_bp: {type(test_bp)}") + +except ImportError as e: + print(f"\nERROR during import: {e}") + traceback.print_exc() +except Exception as e: + print(f"\nUNEXPECTED ERROR during instantiation: {e}") + traceback.print_exc() + +# --- Final Check --- +print("\n--- Final Check ---") +if bp_instance is not None: + print(f"Variable 'bp_instance' was assigned successfully: {bp_instance}") +else: + print("Variable 'bp_instance' was NOT assigned (likely due to error above).") + +print("--- Finished test_auth_init.py ---") diff --git a/frontend_react/.Rhistory b/frontend_react/.Rhistory new file mode 100644 index 0000000..e69de29 diff --git a/frontend_react/.gitignore b/frontend_react/.gitignore new file mode 100644 index 0000000..a547bf3 --- /dev/null +++ b/frontend_react/.gitignore @@ -0,0 +1,24 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/frontend_react/README.md b/frontend_react/README.md new file mode 100644 index 0000000..fd3b758 --- /dev/null +++ b/frontend_react/README.md @@ -0,0 +1,12 @@ +# React + Vite + +This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules. + +Currently, two official plugins are available: + +- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh +- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh + +## Expanding the ESLint configuration + +If you are developing a production application, we recommend using TypeScript and enable type-aware lint rules. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project. 
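The process_url helper added above is left as a pass-through placeholder, with a note that a background worker such as Celery could handle the scraping and summarization. Since celery and redis are already pinned in backend_flask/requirements.txt, a minimal sketch of such a task might look like the following; the broker URL, task name, the "urls" collection, and the myapp.extensions import are assumptions for illustration, not part of this patch.

    from bson import ObjectId             # ships with pymongo
    from celery import Celery

    from myapp import create_app          # the app factory used in run.py
    from myapp.extensions import mongo    # assumed Flask-PyMongo handle

    # Broker URL is an assumption; any Redis instance reachable by the worker would do.
    celery_app = Celery("surfsmart", broker="redis://localhost:6379/0")

    @celery_app.task(bind=True, max_retries=3)
    def process_url_task(self, url_id):
        """Fetch the stored URL document, scrape/summarize it, and update processingStatus."""
        app = create_app("development")
        with app.app_context():
            doc = mongo.db.urls.find_one({"_id": ObjectId(url_id)})  # "urls" collection is assumed
            if doc is None:
                return {"url_id": url_id, "status": "not_found"}
            try:
                # Scraping and summarization would run here (e.g. requests + beautifulsoup4).
                mongo.db.urls.update_one({"_id": doc["_id"]},
                                         {"$set": {"processingStatus": "completed"}})
                return {"url_id": url_id, "status": "completed"}
            except Exception as exc:
                mongo.db.urls.update_one({"_id": doc["_id"]},
                                         {"$set": {"processingStatus": "failed"}})
                raise self.retry(exc=exc, countdown=30)

A route handler would then enqueue work with process_url_task.delay(str(url_id)), and a separately started Celery worker would execute it.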
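test_auth_init.py above is a standalone debugging script. With pytest pinned in backend_flask/requirements.txt, the same Blueprint sanity check could also be expressed as an ordinary test; the file and test names below are hypothetical.

    # hypothetical pytest counterpart of the manual check in test_auth_init.py
    from flask import Blueprint

    def test_auth_blueprint_instantiation():
        bp = Blueprint('auth', __name__, url_prefix='/api/auth')
        assert bp.name == 'auth'
        assert bp.url_prefix == '/api/auth'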
diff --git a/frontend_react/eslint.config.js b/frontend_react/eslint.config.js new file mode 100644 index 0000000..ec2b712 --- /dev/null +++ b/frontend_react/eslint.config.js @@ -0,0 +1,33 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' + +export default [ + { ignores: ['dist'] }, + { + files: ['**/*.{js,jsx}'], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + parserOptions: { + ecmaVersion: 'latest', + ecmaFeatures: { jsx: true }, + sourceType: 'module', + }, + }, + plugins: { + 'react-hooks': reactHooks, + 'react-refresh': reactRefresh, + }, + rules: { + ...js.configs.recommended.rules, + ...reactHooks.configs.recommended.rules, + 'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }], + 'react-refresh/only-export-components': [ + 'warn', + { allowConstantExport: true }, + ], + }, + }, +] diff --git a/frontend_react/index.html b/frontend_react/index.html new file mode 100644 index 0000000..0c589ec --- /dev/null +++ b/frontend_react/index.html @@ -0,0 +1,13 @@ + + + + + + + Vite + React + + +
+ + + diff --git a/frontend_react/package-lock.json b/frontend_react/package-lock.json new file mode 100644 index 0000000..36a597b --- /dev/null +++ b/frontend_react/package-lock.json @@ -0,0 +1,2825 @@ +{ + "name": "surfsmart_react", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "surfsmart_react", + "version": "0.0.0", + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "dnd-kit": "^0.0.2", + "react": "^19.0.0", + "react-dom": "^19.0.0", + "react-icons": "^5.5.0" + }, + "devDependencies": { + "@eslint/js": "^9.21.0", + "@types/react": "^19.0.10", + "@types/react-dom": "^19.0.4", + "@vitejs/plugin-react": "^4.3.4", + "eslint": "^9.21.0", + "eslint-plugin-react-hooks": "^5.1.0", + "eslint-plugin-react-refresh": "^0.4.19", + "globals": "^15.15.0", + "vite": "^6.2.0" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", + "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.10", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.10", + "@babel/parser": "^7.26.10", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.10", + "@babel/types": "^7.26.10", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz", + "integrity": "sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + 
"engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.0.tgz", + "integrity": "sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.26.8", + "@babel/helper-validator-option": "^7.25.9", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz", + "integrity": "sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.25.9", + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.26.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz", + "integrity": "sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9", + "@babel/traverse": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.26.5", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.26.5.tgz", + "integrity": "sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", + "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.0.tgz", + "integrity": "sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0" + }, + "engines": { + 
"node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", + "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.25.9.tgz", + "integrity": "sha512-y8quW6p0WHkEhmErnfe58r7x0A70uKphQm8Sp8cV7tjNQwK56sNVK0M73LK3WuYmsuyrftut4xAkjjgU0twaMg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.25.9.tgz", + "integrity": "sha512-+iqjT8xmXhhYv4/uiYd8FNQsraMFZIfxVSqxxVSZP0WbbSAWvBXAul0m/zu+7Vv4O/3WtApy9pmaTMiumEZgfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.0.tgz", + "integrity": "sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.0.tgz", + "integrity": "sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.27.0", + "@babel/parser": "^7.27.0", + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", + "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.1.tgz", + "integrity": 
"sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.3.1.tgz", + "integrity": "sha512-xkGBRQQab4RLwgXxoqETICr6S5JlogafbhNsidmrkVv2YRs5MLwpjoF2qpiGjQt8S9AoxtIV603s0GIUpY5eYQ==", + "license": "MIT", + "dependencies": { + "@dnd-kit/accessibility": "^3.1.1", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-10.0.0.tgz", + "integrity": "sha512-+xqhmIIzvAYMGfBYYnbKuNicfSsk4RksY2XdmJhT+HAC01nix6fHCztU68jooFiMUB01Ky3F0FyOvhG/BZrWkg==", + "license": "MIT", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.3.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.2.tgz", + "integrity": "sha512-wCIboOL2yXZym2cgm6mlA742s9QeJ8DjGVaL39dLN4rRwrOgOyYSnOaFPhKZGLb2ngj4EyfAFjsNJwPXZvseag==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.2.tgz", + "integrity": "sha512-NQhH7jFstVY5x8CKbcfa166GoV0EFkaPkCKBQkdPJFvo5u+nGXLEH/ooniLb3QI8Fk58YAx7nsPLozUWfCBOJA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.2.tgz", + "integrity": "sha512-5ZAX5xOmTligeBaeNEPnPaeEuah53Id2tX4c2CVP3JaROTH+j4fnfHCkr1PjXMd78hMst+TlkfKcW/DlTq0i4w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.2.tgz", + "integrity": "sha512-Ffcx+nnma8Sge4jzddPHCZVRvIfQ0kMsUsCMcJRHkGJ1cDmhe4SsrYIjLUKn1xpHZybmOqCWwB0zQvsjdEHtkg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.2.tgz", + "integrity": "sha512-MpM6LUVTXAzOvN4KbjzU/q5smzryuoNjlriAIx+06RpecwCkL9JpenNzpKd2YMzLJFOdPqBpuub6eVRP5IgiSA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + 
"engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.2.tgz", + "integrity": "sha512-5eRPrTX7wFyuWe8FqEFPG2cU0+butQQVNcT4sVipqjLYQjjh8a8+vUTfgBKM88ObB85ahsnTwF7PSIt6PG+QkA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.2.tgz", + "integrity": "sha512-mLwm4vXKiQ2UTSX4+ImyiPdiHjiZhIaE9QvC7sw0tZ6HoNMjYAqQpGyui5VRIi5sGd+uWq940gdCbY3VLvsO1w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.2.tgz", + "integrity": "sha512-6qyyn6TjayJSwGpm8J9QYYGQcRgc90nmfdUb0O7pp1s4lTY+9D0H9O02v5JqGApUyiHOtkz6+1hZNvNtEhbwRQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.2.tgz", + "integrity": "sha512-UHBRgJcmjJv5oeQF8EpTRZs/1knq6loLxTsjc3nxO9eXAPDLcWW55flrMVc97qFPbmZP31ta1AZVUKQzKTzb0g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.2.tgz", + "integrity": "sha512-gq/sjLsOyMT19I8obBISvhoYiZIAaGF8JpeXu1u8yPv8BE5HlWYobmlsfijFIZ9hIVGYkbdFhEqC0NvM4kNO0g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.2.tgz", + "integrity": "sha512-bBYCv9obgW2cBP+2ZWfjYTU+f5cxRoGGQ5SeDbYdFCAZpYWrfjjfYwvUpP8MlKbP0nwZ5gyOU/0aUzZ5HWPuvQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.2.tgz", + "integrity": "sha512-SHNGiKtvnU2dBlM5D8CXRFdd+6etgZ9dXfaPCeJtz+37PIUlixvlIhI23L5khKXs3DIzAn9V8v+qb1TRKrgT5w==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.2.tgz", + "integrity": "sha512-hDDRlzE6rPeoj+5fsADqdUZl1OzqDYow4TB4Y/3PlKBD0ph1e6uPHzIQcv2Z65u2K0kpeByIyAjCmjn1hJgG0Q==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.2.tgz", + "integrity": 
"sha512-tsHu2RRSWzipmUi9UBDEzc0nLc4HtpZEI5Ba+Omms5456x5WaNuiG3u7xh5AO6sipnJ9r4cRWQB2tUjPyIkc6g==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.2.tgz", + "integrity": "sha512-k4LtpgV7NJQOml/10uPU0s4SAXGnowi5qBSjaLWMojNCUICNu7TshqHLAEbkBdAszL5TabfvQ48kK84hyFzjnw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.2.tgz", + "integrity": "sha512-GRa4IshOdvKY7M/rDpRR3gkiTNp34M0eLTaC1a08gNrh4u488aPhuZOCpkF6+2wl3zAN7L7XIpOFBhnaE3/Q8Q==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.2.tgz", + "integrity": "sha512-QInHERlqpTTZ4FRB0fROQWXcYRD64lAoiegezDunLpalZMjcUcld3YzZmVJ2H/Cp0wJRZ8Xtjtj0cEHhYc/uUg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.2.tgz", + "integrity": "sha512-talAIBoY5M8vHc6EeI2WW9d/CkiO9MQJ0IOWX8hrLhxGbro/vBXJvaQXefW2cP0z0nQVTdQ/eNyGFV1GSKrxfw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.2.tgz", + "integrity": "sha512-voZT9Z+tpOxrvfKFyfDYPc4DO4rk06qamv1a/fkuzHpiVBMOhpjK+vBmWM8J1eiB3OLSMFYNaOaBNLXGChf5tg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.2.tgz", + "integrity": "sha512-dcXYOC6NXOqcykeDlwId9kB6OkPUxOEqU+rkrYVqJbK2hagWOMrsTGsMr8+rW02M+d5Op5NNlgMmjzecaRf7Tg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.2.tgz", + "integrity": "sha512-t/TkWwahkH0Tsgoq1Ju7QfgGhArkGLkF1uYz8nQS/PPFlXbP5YgRpqQR3ARRiC2iXoLTWFxc6DJMSK10dVXluw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.2.tgz", + "integrity": "sha512-cfZH1co2+imVdWCjd+D1gf9NjkchVhhdpgb1q5y6Hcv9TP6Zi9ZG/beI3ig8TvwT9lH9dlxLq5MQBBgwuj4xvA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": 
">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.2.tgz", + "integrity": "sha512-7Loyjh+D/Nx/sOTzV8vfbB3GJuHdOQyrOryFdZvPHLf42Tk9ivBU5Aedi7iyX+x6rbn2Mh68T4qq1SDqJBQO5Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.2.tgz", + "integrity": "sha512-WRJgsz9un0nqZJ4MfhabxaD9Ft8KioqU3JMinOTvobbX6MOSUigSBlogP8QB3uxpJDsFS6yN+3FDBdqE5lg9kg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.2.tgz", + "integrity": "sha512-kM3HKb16VIXZyIeVrM1ygYmZBKybX8N4p754bw390wGO3Tf2j4L2/WYL+4suWujpgf6GBYs3jv7TyUivdd05JA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.1.tgz", + "integrity": "sha512-soEIOALTfTK6EjmKMMoLugwaP0rzkad90iIWd1hMO9ARkSAyjfMfkRRhLvD5qH7vvM0Cg72pieUfR6yh6XxC4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz", + "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.20.0.tgz", + "integrity": "sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.6", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.1.tgz", + "integrity": "sha512-RI17tsD2frtDu/3dmI7QRrD4bedNKPM08ziRYaC5AhkGrzIAJelm9kJU1TznK+apx6V+cqRz8tfpEeG3oIyjxw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || 
>=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", + "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/js": { + "version": "9.24.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.24.0.tgz", + "integrity": "sha512-uIY/y3z0uvOGX8cp1C2fiC4+ZmBhp6yZWkojtHL1YEMnRt1Y63HB9TM17proGEmeG7HeUY+UP36F0aknKYTpYA==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.8.tgz", + "integrity": "sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.13.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit/node_modules/@eslint/core": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.13.0.tgz", + "integrity": "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.6", + "resolved": 
"https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz", + "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.3.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz", + "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.2.tgz", + "integrity": "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", + "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.39.0.tgz", + "integrity": "sha512-lGVys55Qb00Wvh8DMAocp5kIcaNzEFTmGhfFd88LfaogYTRKrdxgtlO5H6S49v2Nd8R2C6wLOal0qv6/kCkOwA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.39.0.tgz", + "integrity": "sha512-It9+M1zE31KWfqh/0cJLrrsCPiF72PoJjIChLX+rEcujVRCb4NLQ5QzFkzIZW8Kn8FTbvGQBY5TkKBau3S8cCQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.39.0.tgz", + "integrity": "sha512-lXQnhpFDOKDXiGxsU9/l8UEGGM65comrQuZ+lDcGUx+9YQ9dKpF3rSEGepyeR5AHZ0b5RgiligsBhWZfSSQh8Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.39.0.tgz", + "integrity": "sha512-mKXpNZLvtEbgu6WCkNij7CGycdw9cJi2k9v0noMb++Vab12GZjFgUXD69ilAbBh034Zwn95c2PNSz9xM7KYEAQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.39.0.tgz", + "integrity": "sha512-jivRRlh2Lod/KvDZx2zUR+I4iBfHcu2V/BA2vasUtdtTN2Uk3jfcZczLa81ESHZHPHy4ih3T/W5rPFZ/hX7RtQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.39.0.tgz", + "integrity": "sha512-8RXIWvYIRK9nO+bhVz8DwLBepcptw633gv/QT4015CpJ0Ht8punmoHU/DuEd3iw9Hr8UwUV+t+VNNuZIWYeY7Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.39.0.tgz", + "integrity": "sha512-mz5POx5Zu58f2xAG5RaRRhp3IZDK7zXGk5sdEDj4o96HeaXhlUwmLFzNlc4hCQi5sGdR12VDgEUqVSHer0lI9g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.39.0.tgz", + "integrity": "sha512-+YDwhM6gUAyakl0CD+bMFpdmwIoRDzZYaTWV3SDRBGkMU/VpIBYXXEvkEcTagw/7VVkL2vA29zU4UVy1mP0/Yw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.39.0.tgz", + 
"integrity": "sha512-EKf7iF7aK36eEChvlgxGnk7pdJfzfQbNvGV/+l98iiMwU23MwvmV0Ty3pJ0p5WQfm3JRHOytSIqD9LB7Bq7xdQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.39.0.tgz", + "integrity": "sha512-vYanR6MtqC7Z2SNr8gzVnzUul09Wi1kZqJaek3KcIlI/wq5Xtq4ZPIZ0Mr/st/sv/NnaPwy/D4yXg5x0B3aUUA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loongarch64-gnu": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.39.0.tgz", + "integrity": "sha512-NMRUT40+h0FBa5fb+cpxtZoGAggRem16ocVKIv5gDB5uLDgBIwrIsXlGqYbLwW8YyO3WVTk1FkFDjMETYlDqiw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.39.0.tgz", + "integrity": "sha512-0pCNnmxgduJ3YRt+D+kJ6Ai/r+TaePu9ZLENl+ZDV/CdVczXl95CbIiwwswu4L+K7uOIGf6tMo2vm8uadRaICQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.39.0.tgz", + "integrity": "sha512-t7j5Zhr7S4bBtksT73bO6c3Qa2AV/HqiGlj9+KB3gNF5upcVkx+HLgxTm8DK4OkzsOYqbdqbLKwvGMhylJCPhQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.39.0.tgz", + "integrity": "sha512-m6cwI86IvQ7M93MQ2RF5SP8tUjD39Y7rjb1qjHgYh28uAPVU8+k/xYWvxRO3/tBN2pZkSMa5RjnPuUIbrwVxeA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.39.0.tgz", + "integrity": "sha512-iRDJd2ebMunnk2rsSBYlsptCyuINvxUfGwOUldjv5M4tpa93K8tFMeYGpNk2+Nxl+OBJnBzy2/JCscGeO507kA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.39.0.tgz", + "integrity": "sha512-t9jqYw27R6Lx0XKfEFe5vUeEJ5pF3SGIM6gTfONSMb7DuG6z6wfj2yjcoZxHg129veTqU7+wOhY6GX8wmf90dA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.39.0.tgz", + "integrity": "sha512-ThFdkrFDP55AIsIZDKSBWEt/JcWlCzydbZHinZ0F/r1h83qbGeenCt/G/wG2O0reuENDD2tawfAj2s8VK7Bugg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.39.0.tgz", + "integrity": "sha512-jDrLm6yUtbOg2TYB3sBF3acUnAwsIksEYjLeHL+TJv9jg+TmTwdyjnDex27jqEMakNKf3RwwPahDIt7QXCSqRQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.39.0.tgz", + "integrity": "sha512-6w9uMuza+LbLCVoNKL5FSLE7yvYkq9laSd09bwS0tMjkwXrmib/4KmoJcrKhLWHvw19mwU+33ndC69T7weNNjQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.39.0.tgz", + "integrity": "sha512-yAkUOkIKZlK5dl7u6dg897doBgLXmUHhIINM2c+sND3DZwnrdQkkSiDh7N75Ll4mM4dxSkYfXqU9fW3lLkMFug==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.7.tgz", + "integrity": "sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/estree": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.7.tgz", + "integrity": "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "19.1.1", + "resolved": 
"https://registry.npmjs.org/@types/react/-/react-19.1.1.tgz", + "integrity": "sha512-ePapxDL7qrgqSF67s0h9m412d9DbXyC1n59O2st+9rjuuamWsZuD2w55rqY12CbzsZ7uVXb5Nw0gEp9Z8MMutQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": "19.1.2", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.2.tgz", + "integrity": "sha512-XGJkWF41Qq305SKWEILa1O8vzhb3aOo3ogBlSmiqNko/WmRb6QIaweuZCXjKygVDXpzXb5wyxKTSOsmkuqj+Qw==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^19.0.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.3.4.tgz", + "integrity": "sha512-SCCPBJtYLdE8PX/7ZQAs1QAZ8Jqwih+0VBLum1EGqmCCQal+MIUqLCzj3ZUy8ufbC0cAM4LRlSTm7IQJwWT4ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.26.0", + "@babel/plugin-transform-react-jsx-self": "^7.25.9", + "@babel/plugin-transform-react-jsx-source": "^7.25.9", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.14.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0" + } + }, + "node_modules/acorn": { + "version": "8.14.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", + "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { 
+ "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/browserslist": { + "version": "4.24.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", + "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "caniuse-lite": "^1.0.30001688", + "electron-to-chromium": "^1.5.73", + "node-releases": "^2.0.19", + "update-browserslist-db": "^1.1.1" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001713", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001713.tgz", + "integrity": "sha512-wCIWIg+A4Xr7NfhTuHdX+/FKh3+Op3LBbSp2N5Pfx6T/LhdQy3GTyoTg48BReaW/MyMNZAkTadsBtai3ldWK0Q==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/convert-source-map": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/dnd-kit": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/dnd-kit/-/dnd-kit-0.0.2.tgz", + "integrity": "sha512-d8AYd6I7D2b5u882+QNVGw0slBAt851/LWZ2j/pU+onf5/TGEKXeb47sCyhPYKEAUXp4oLfvWfNCqfkU03R1lw==", + "deprecated": "Package no longer supported. 
Contact Support at https://www.npmjs.com/support for more info.", + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.136", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.136.tgz", + "integrity": "sha512-kL4+wUTD7RSA5FHx5YwWtjDnEEkIIikFgWHR4P6fqjw1PPLlqYkxeOb++wAauAssat0YClCy8Y3C5SxgSkjibQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/esbuild": { + "version": "0.25.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.2.tgz", + "integrity": "sha512-16854zccKPnC+toMywC+uKNeYSv+/eXkevRAfwRD/G9Cleq66m8XFIrigkbvauLLlCfDL45Q2cWegSg53gGBnQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.2", + "@esbuild/android-arm": "0.25.2", + "@esbuild/android-arm64": "0.25.2", + "@esbuild/android-x64": "0.25.2", + "@esbuild/darwin-arm64": "0.25.2", + "@esbuild/darwin-x64": "0.25.2", + "@esbuild/freebsd-arm64": "0.25.2", + "@esbuild/freebsd-x64": "0.25.2", + "@esbuild/linux-arm": "0.25.2", + "@esbuild/linux-arm64": "0.25.2", + "@esbuild/linux-ia32": "0.25.2", + "@esbuild/linux-loong64": "0.25.2", + "@esbuild/linux-mips64el": "0.25.2", + "@esbuild/linux-ppc64": "0.25.2", + "@esbuild/linux-riscv64": "0.25.2", + "@esbuild/linux-s390x": "0.25.2", + "@esbuild/linux-x64": "0.25.2", + "@esbuild/netbsd-arm64": "0.25.2", + "@esbuild/netbsd-x64": "0.25.2", + "@esbuild/openbsd-arm64": "0.25.2", + "@esbuild/openbsd-x64": "0.25.2", + "@esbuild/sunos-x64": "0.25.2", + "@esbuild/win32-arm64": "0.25.2", + "@esbuild/win32-ia32": "0.25.2", + "@esbuild/win32-x64": "0.25.2" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.24.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.24.0.tgz", + "integrity": "sha512-eh/jxIEJyZrvbWRe4XuVclLPDYSYYYgLy5zXGGxD6j8zjSAxFEzI2fL/8xNq6O2yKqVt+eF2YhV+hxjV6UKXwQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.20.0", + "@eslint/config-helpers": "^0.2.0", + "@eslint/core": "^0.12.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.24.0", + "@eslint/plugin-kit": "^0.2.7", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "@types/json-schema": "^7.0.15", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.3.0", + "eslint-visitor-keys": "^4.2.0", + "espree": "^10.3.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": 
"^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.2.0.tgz", + "integrity": "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" + } + }, + "node_modules/eslint-plugin-react-refresh": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.19.tgz", + "integrity": "sha512-eyy8pcr/YxSYjBoqIFSrlbn9i/xvxUFa8CjzAYo9cFjgGXqq1hyjihcpZvxRLalpaWmueWR81xn7vuKmAFijDQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "eslint": ">=8.40" + } + }, + "node_modules/eslint-scope": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.3.0.tgz", + "integrity": "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", + "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.14.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + 
"dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + 
"engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "15.15.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-15.15.0.tgz", + "integrity": "sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + 
"node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": 
"https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/node-releases": { + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/postcss": { + "version": "8.5.3", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.3.tgz", + "integrity": "sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.8", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/react": { + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", + "integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.0.tgz", + "integrity": "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.26.0" + }, + "peerDependencies": { + "react": "^19.1.0" + } + }, + "node_modules/react-icons": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.5.0.tgz", + "integrity": 
"sha512-MEFcXdkP3dLo8uumGI5xN3lDFNsRtrjbOEKDLD7yv76v4wpnEq2Lt2qeHaQOr34I/wPN3s3+N08WkQ+CW37Xiw==", + "license": "MIT", + "peerDependencies": { + "react": "*" + } + }, + "node_modules/react-refresh": { + "version": "0.14.2", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.2.tgz", + "integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/rollup": { + "version": "4.39.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.39.0.tgz", + "integrity": "sha512-thI8kNc02yNvnmJp8dr3fNWJ9tCONDhp6TV35X6HkKGGs9E6q7YWCHbe5vKiTa7TAiNcFEmXKj3X/pG2b3ci0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.7" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.39.0", + "@rollup/rollup-android-arm64": "4.39.0", + "@rollup/rollup-darwin-arm64": "4.39.0", + "@rollup/rollup-darwin-x64": "4.39.0", + "@rollup/rollup-freebsd-arm64": "4.39.0", + "@rollup/rollup-freebsd-x64": "4.39.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.39.0", + "@rollup/rollup-linux-arm-musleabihf": "4.39.0", + "@rollup/rollup-linux-arm64-gnu": "4.39.0", + "@rollup/rollup-linux-arm64-musl": "4.39.0", + "@rollup/rollup-linux-loongarch64-gnu": "4.39.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.39.0", + "@rollup/rollup-linux-riscv64-gnu": "4.39.0", + "@rollup/rollup-linux-riscv64-musl": "4.39.0", + "@rollup/rollup-linux-s390x-gnu": "4.39.0", + "@rollup/rollup-linux-x64-gnu": "4.39.0", + "@rollup/rollup-linux-x64-musl": "4.39.0", + "@rollup/rollup-win32-arm64-msvc": "4.39.0", + "@rollup/rollup-win32-ia32-msvc": "4.39.0", + "@rollup/rollup-win32-x64-msvc": "4.39.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/scheduler": { + "version": "0.26.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", + "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + 
"node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/vite": { + "version": "6.2.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.6.tgz", + "integrity": "sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.25.0", + "postcss": "^8.5.3", + "rollup": "^4.30.1" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "jiti": 
">=1.21.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/frontend_react/package.json b/frontend_react/package.json new file mode 100644 index 0000000..e2238b3 --- /dev/null +++ b/frontend_react/package.json @@ -0,0 +1,31 @@ +{ + "name": "surfsmart_react", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "@dnd-kit/core": "^6.3.1", + "@dnd-kit/sortable": "^10.0.0", + "dnd-kit": "^0.0.2", + "react": "^19.0.0", + "react-dom": "^19.0.0", + "react-icons": "^5.5.0" + }, + "devDependencies": { + "@eslint/js": "^9.21.0", + "@types/react": "^19.0.10", + "@types/react-dom": "^19.0.4", + "@vitejs/plugin-react": "^4.3.4", + "eslint": "^9.21.0", + "eslint-plugin-react-hooks": "^5.1.0", + "eslint-plugin-react-refresh": "^0.4.19", + "globals": "^15.15.0", + "vite": "^6.2.0" + } +} diff --git a/frontend_react/public/vite.svg b/frontend_react/public/vite.svg new file mode 100644 index 0000000..e7b8dfb --- /dev/null +++ b/frontend_react/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend_react/src/App.jsx b/frontend_react/src/App.jsx new file mode 100644 index 0000000..545a073 --- /dev/null +++ b/frontend_react/src/App.jsx @@ -0,0 +1,90 @@ +// frontend/src/App.jsx +import React, { useState, useEffect } from 'react'; +import LeftSidebar from './components/LeftSidebar/LeftSidebar.jsx'; +import MainContent from './components/MainContent/MainContent.jsx'; +import 
LoginPage from './components/LoginPage/LoginPage.jsx'; +import styles from './App.module.css'; +import { getAuthToken } from './services/api'; // Assuming fetchProjects is not needed here directly + +/** + * App Component + * + * Manages authentication state AND the currently selected project ID. + * Renders either LoginPage or the main layout. + */ +function App() { + const [isLoggedIn, setIsLoggedIn] = useState(false); + const [authChecked, setAuthChecked] = useState(false); + // --- State for selected project ID --- + const [currentProjectId, setCurrentProjectId] = useState(null); // Initialize to null + + useEffect(() => { + const token = getAuthToken(); + if (token) { + setIsLoggedIn(true); + // If logged in, we might want to fetch projects and set an initial ID, + // but that logic is currently in LeftSidebar. We'll let LeftSidebar + // trigger the initial selection via the callback for now. + } else { + // Ensure currentProjectId is reset if no token found + setCurrentProjectId(null); + } + setAuthChecked(true); + }, []); + + const handleLoginSuccess = () => { + setIsLoggedIn(true); + // Reset project ID on new login, let LeftSidebar set the initial one + setCurrentProjectId(null); + }; + + const handleLogout = () => { + localStorage.removeItem('authToken'); + setIsLoggedIn(false); + setCurrentProjectId(null); // Reset project ID on logout + console.log('User logged out.'); + }; + + // --- Handler function to be passed to LeftSidebar --- + const handleProjectSelect = (projectId) => { + console.log("App: Project selected:", projectId); + setCurrentProjectId(projectId); + }; + // --- End handler --- + + console.log('Render - isLoggedIn state:', isLoggedIn); + console.log('Render - currentProjectId state:', currentProjectId); + + + if (!authChecked) { + return
<div>Loading Authentication...</div>; // Or a loading spinner
+  }
+
+  const containerClassName = isLoggedIn
+    ? `${styles.appContainer} ${styles.loggedInLayout}`
+    : styles.appContainer;
+
+  console.log('Applied className:', containerClassName);
+
+  return (
+    <div className={containerClassName}>
+      {isLoggedIn ? (
+        <>
+          {/* Pass down currentProjectId and the selection handler */}
+          <LeftSidebar
+            onLogout={handleLogout}
+            onProjectSelect={handleProjectSelect}
+            currentProjectId={currentProjectId}
+          />
+          {/* Pass down currentProjectId */}
+          <MainContent currentProjectId={currentProjectId} />
+          {/* Blank columns handled by CSS Grid */}
+        </>
+      ) : (
+        <LoginPage onLoginSuccess={handleLoginSuccess} />
+      )}
+    </div>
+  );
+}
+
+export default App;
diff --git a/frontend_react/src/App.module.css b/frontend_react/src/App.module.css
new file mode 100644
index 0000000..6c1d89a
--- /dev/null
+++ b/frontend_react/src/App.module.css
@@ -0,0 +1,150 @@
+/* App.module.css */
+
+/* Define reusable variables globally within this module scope or use :global(:root) */
+:global(:root) {
+  /* Color Palette */
+  --primary-color: #b2e3b6; /* soft light green */
+  --primary-hover-color: #9fd4a6; /* slightly darker on hover */
+  --primary-active-color: #89c897; /* darker still when active */
+  --secondary-color: #a9b9ac; /* neutral grey-green */
+  --secondary-hover-color: #95a89b;
+  --accent-color: #76c28f; /* accent color, slightly more saturated */
+  --accent-hover-color: #5bab74;
+
+  --ai-background: #799fff91;
+  --ai-background-hover: #627cca75;
+  --ai-background-activate: #4063cc7c;
+  --ai-text: #d40000;
+  --ai-text-hover: #7e2525;
+  --ai-text-activate: #641313;
+
+  --success-color: #6fbf73; /* success feedback, gentle green */
+  --danger-color: #dc6b6b; /* warnings/errors keep red, slightly softened */
+  --warning-color: #e6c87f; /* softened yellow for warnings */
+  --light-color: #f3f8f4; /* light green background instead of pure white */
+  --white-color: #ffffff;
+  --dark-color: #2e3d31; /* dark, but not pure black */
+  --text-color-primary: #1d2b21; /* primary text color, dark grey-green */
+  --text-color-secondary: #5c6e5f; /* secondary text color, light grey-green */
+  --text-color-light: #3a4b3f; /* text for reversed-out (on-color) surfaces */
+
+  --border-color: #cbd5cb;
+  --border-radius-sm: 0.25rem;
+  --border-radius-md: 0.375rem;
+  --border-radius-lg: 0.5rem;
+
+  /* Background Arc Colors (coordinated translucent greens) */
+  --arc-color-1: rgba(183, 228, 184, 0.25); /* light green */
+  --arc-color-2: rgba(169, 209, 174, 0.2); /* green-grey */
+  --arc-color-3: rgba(202, 235, 210, 0.3); /* whitish green */
+
+  /* Shadows */
+  --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.03);
+  --shadow-md: 0 3px 6px rgba(0, 0, 0, 0.06);
+  --shadow-lg: 0 10px 20px rgba(0, 0, 0, 0.08);
+
+  /* Transitions */
+  --transition-fast: all 0.15s ease-in-out;
+  --transition-base: all 0.2s ease-in-out;
+
+  /* Spacing */
+  --spacing-xs: 4px;
+  --spacing-sm: 8px;
+  --spacing-md: 16px;
+  --spacing-lg: 24px;
+  --spacing-xl: 32px;
+}
+
+
+
+/* Base styles for the app container */
+.appContainer {
+  min-height: 100vh;
+  /* background-color: var(--light-color); */ /* Background now handled by ::before */
+  width: 100%;
+  box-sizing: border-box;
+  position: relative; /* Needed for z-index stacking context if ::before uses absolute */
+  z-index: 1; /* Ensure content is above the ::before pseudo-element */
+}
+
+/* --- Fixed Background with Arcs using ::before --- */
+.appContainer::before {
+  content: '';
+  position: fixed; /* Fixed relative to viewport */
+  top: 0;
+  left: 0;
+  right: 0;
+  bottom: 0;
+  z-index: -1; /* Place behind the content */
+  background-color: var(--light-color); /* Base background color */
+
+  /* --- SVG Background Image --- */
+  /* Generated using SVG data URI. You can create more complex SVGs.
*/ + /* This example creates three large arcs from corners/edges */ + background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100' preserveAspectRatio='none'%3E%3C!-- Arc 1: Top Left --%3E%3Cpath d='M 0,0 L 0,50 A 50 50 0 0 1 50,0 Z' fill='rgba(174, 203, 255, 0.3)' /%3E%3C!-- Arc 2: Bottom Right --%3E%3Cpath d='M 100,100 L 50,100 A 50 50 0 0 1 100,50 Z' fill='rgba(255, 193, 174, 0.2)' /%3E%3C!-- Arc 3: Bottom Left --%3E%3Cpath d='M 0,100 L 0,70 A 80 80 0 0 0 70,100 Z' fill='rgba(167, 255, 174, 0.25)' /%3E%3C/svg%3E"); + + background-repeat: no-repeat; + /* Adjust background size and position as needed */ + /* 'cover' might distort arcs, 'contain' might leave gaps */ + /* Using fixed size/positioning might be better */ + background-size: 100% 100%; /* Stretch SVG to container */ + /* Or position specific SVGs: */ + /* background-position: top left, bottom right, bottom left; */ + /* background-size: 50% auto, 50% auto, 80% auto; */ +} + + +/* --- Styles applied ONLY when logged in (Grid Layout) --- */ +.appContainer.loggedInLayout { + display: grid; + align-items: stretch; + grid-template-columns: 2fr 2fr 6fr 2fr; /* Default large screen */ + /* Ensure grid layout itself doesn't have conflicting background */ + background-color: transparent; /* Make grid container transparent */ +} + +/* Assign components to specific grid columns ONLY when logged in */ +/* Make sure children have backgrounds so fixed background doesn't show through */ +.appContainer.loggedInLayout > :nth-child(1) { /* LeftSidebar */ + grid-column: 1 / 2; + min-height: 100vh; + background-color: var(--white-color); /* Give sidebar a background */ + z-index: 2; /* Ensure sidebar is above background */ +} + +.appContainer.loggedInLayout > :nth-child(2) { /* MainContent */ + grid-column: 3 / 4; + min-height: 100vh; + overflow-y: auto; + padding: var(--spacing-lg); + box-sizing: border-box; + background-color: transparent; /* Let appContainer::before show through blank columns */ + z-index: 2; /* Ensure content is above background */ +} + + +/* --- Responsive Breakpoints for the LOGGED-IN layout --- */ +@media (max-width: 1200px) and (min-width: 1000px) { + .appContainer.loggedInLayout { + grid-template-columns: 2fr 2fr 8fr; + } + .appContainer.loggedInLayout > :nth-child(1) { grid-column: 1 / 2; } + .appContainer.loggedInLayout > :nth-child(2) { grid-column: 3 / 4; } +} +@media (max-width: 1000px) and (min-width: 768px) { + .appContainer.loggedInLayout { + grid-template-columns: 2fr 10fr; + } + .appContainer.loggedInLayout > :nth-child(1) { grid-column: 1 / 2; } + .appContainer.loggedInLayout > :nth-child(2) { grid-column: 2 / 3; } +} + @media (max-width: 768px) { + .appContainer.loggedInLayout { + display: block; /* Revert to block for mobile when logged in */ + } + .appContainer.loggedInLayout > :nth-child(1) { grid-column: auto; } + .appContainer.loggedInLayout > :nth-child(2) { grid-column: auto; padding: var(--spacing-md); } + } + +/* --- End Logged-in Styles --- */ + diff --git a/frontend_react/src/components/LeftSidebar/LeftSidebar.jsx b/frontend_react/src/components/LeftSidebar/LeftSidebar.jsx new file mode 100644 index 0000000..ded128c --- /dev/null +++ b/frontend_react/src/components/LeftSidebar/LeftSidebar.jsx @@ -0,0 +1,183 @@ +// frontend/src/components/LeftSidebar/LeftSidebar.jsx +import React, { useState, useEffect } from 'react'; +import styles from './LeftSidebar.module.css'; +// Import createProject API function +import { fetchProjects, createProject } from 
'../../services/api'; +import { FaGithub, FaCog, FaPlus, FaUserCircle, FaSignOutAlt } from 'react-icons/fa'; + +/** + * LeftSidebar Component + * Fetches projects, displays them, handles creating new projects, + * and calls onProjectSelect when one is clicked. + * Highlights the selected project based on currentProjectId prop. + */ +function LeftSidebar({ onLogout, onProjectSelect, currentProjectId }) { + const [projects, setProjects] = useState([]); + const [isLoading, setIsLoading] = useState(true); // Loading state for initial fetch + const [isCreating, setIsCreating] = useState(false); // Loading state for creating project + const [error, setError] = useState(null); + const [username, setUsername] = useState('Gellar'); // Placeholder + + useEffect(() => { + // Placeholder: fetch or get username from context/auth state + // setUsername(fetchedUsername); + }, []); + + // Function to fetch projects, reusable + const loadProjects = async () => { + setIsLoading(true); + setError(null); + try { + const data = await fetchProjects(); + console.log("LeftSidebar: Fetched projects data:", data); + setProjects(data || []); + return data || []; // Return fetched data + } catch (err) { + if (err.message === "Authentication failed. Please log in again.") { + setError('Authentication error. Please log in.'); + } else { + console.error("LeftSidebar: Failed to fetch projects:", err); + setError('Failed to load projects.'); + } + setProjects([]); // Clear projects on error + if(onProjectSelect) { + onProjectSelect(null); // Clear selection in App on error + } + return []; // Return empty array on error + } finally { + setIsLoading(false); + } + }; + + + // Initial load and setting initial selection + useEffect(() => { + loadProjects().then(initialProjects => { + // Set initial project selection if none is selected yet + if (initialProjects && initialProjects.length > 0 && currentProjectId === null && onProjectSelect) { + console.log("LeftSidebar: Setting initial project:", initialProjects[0].id); + onProjectSelect(initialProjects[0].id); + } else if ((!initialProjects || initialProjects.length === 0) && onProjectSelect) { + onProjectSelect(null); + } + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [onProjectSelect]); // Run only once on mount conceptually (onProjectSelect should be stable) + + + const handleSelectProject = (projectId) => { + if (onProjectSelect) { + onProjectSelect(projectId); + } + console.log("LeftSidebar: Selected project:", projectId); + }; + + // --- Updated handleNewProject --- + const handleNewProject = async () => { + console.log("Create new project clicked"); + const name = prompt("Enter new project name:"); // Get name from user + + if (name && name.trim() !== '') { + setIsCreating(true); // Set loading state for creation + setError(null); // Clear previous errors + try { + // Call the createProject API function + const newProjectData = await createProject({ name: name.trim() }); + console.log("LeftSidebar: Project created successfully:", newProjectData); + + // Refresh the project list to include the new one + await loadProjects(); // Reuse the fetching logic + + // Automatically select the newly created project + // Ensure newProjectData.id exists (mapped in api.js) + if (newProjectData && newProjectData.id && onProjectSelect) { + console.log("LeftSidebar: Selecting newly created project:", newProjectData.id); + onProjectSelect(newProjectData.id); + } else { + console.warn("LeftSidebar: Could not get ID of newly created project to select it."); + } + + } 
catch (error) { + console.error("LeftSidebar: Failed to create project:", error); + setError(`Error creating project: ${error.message}`); // Show error specific to creation + alert(`Error creating project: ${error.message}`); // Also show alert + } finally { + setIsCreating(false); // Reset loading state + } + } else if (name !== null) { // Only show alert if prompt wasn't cancelled + alert("Project name cannot be empty."); + } + }; + // --- End updated handleNewProject --- + + const handleLogoutClick = () => { + if (onLogout && typeof onLogout === 'function') { + onLogout(); + } + }; + + return ( +
+
+ Icon + SurfSmart +
+ + {/* Disable button while creating */} + + + {/* Display creation error */} + {error && !isLoading &&

{error}

} + + + +
+
+ + {username} +
+
+ + + + + + + +
+
+
+ ); +} + +export default LeftSidebar; diff --git a/frontend_react/src/components/LeftSidebar/LeftSidebar.module.css b/frontend_react/src/components/LeftSidebar/LeftSidebar.module.css new file mode 100644 index 0000000..7bc1f09 --- /dev/null +++ b/frontend_react/src/components/LeftSidebar/LeftSidebar.module.css @@ -0,0 +1,213 @@ +/* components/LeftSidebar/LeftSidebar.module.css */ +.sidebar { + background-color: var(--background-color); /* White background */ + /* padding: 20px; */ /* Use variable */ + padding: var(--spacing-lg) var(--spacing-md); /* Adjust padding */ + display: flex; + flex-direction: column; + border-right: 1px solid var(--border-color); /* Use variable */ + box-shadow: var(--shadow-sm); /* Subtle shadow */ + box-sizing: border-box; + grid-row: 1 / -1; + transition: var(--transition-base); /* Add transition for potential future changes like collapse */ +} + +.logoSection { + margin-bottom: var(--spacing-xl); /* Use variable */ + display: flex; + align-items: center; + padding-left: var(--spacing-xs); /* Align with project items */ +} + +.logo { + font-weight: bold; + margin-right: var(--spacing-sm); + /* Placeholder style */ + border: 1px solid #ccc; + padding: 5px 10px; + border-radius: var(--border-radius-sm); +} + +.appName { + font-size: 1.2em; + font-weight: 600; /* Slightly bolder */ + color: var(--text-color-primary); +} + +.newProjectButton { + background-color: var(--primary-color); + color: var(--text-color-light); + border: none; + padding: 10px 15px; + border-radius: var(--border-radius-md); /* Use variable */ + cursor: pointer; + font-size: 1em; + margin-bottom: var(--spacing-lg); /* Use variable */ + display: flex; + align-items: center; + justify-content: center; + transition: var(--transition-base); /* Use variable */ + box-shadow: var(--shadow-sm); +} + +.newProjectButton:hover { + background-color: var(--primary-hover-color); + box-shadow: var(--shadow-md); + transform: translateY(-1px); /* Subtle lift */ +} + +.newProjectButton:active { + background-color: var(--primary-active-color); + transform: translateY(0px); + box-shadow: none; +} + + +.newProjectIcon { + margin-left: var(--spacing-sm); +} + +.projectList { + flex-grow: 1; + overflow-y: auto; +} + +.projectItem { + display: block; + padding: var(--spacing-sm) var(--spacing-md); /* Adjust padding */ + margin-bottom: var(--spacing-xs); /* Use variable */ + text-decoration: none; + color: var(--text-color-secondary); /* Use variable */ + border-radius: var(--border-radius-md); /* Use variable */ + transition: var(--transition-fast); /* Use variable */ + font-weight: 500; + position: relative; /* For potential ::before pseudo-element */ +} + +.projectItem:hover { + background-color: var(--light-color); /* Use variable */ + color: var(--text-color-primary); +} + +.projectItem.selected { + background-color: var(--primary-color); /* Use variable */ + font-weight: 600; + color: var(--text-color-light); /* Use variable */ + box-shadow: inset 0 1px 3px rgba(0, 0, 0, 0.1); /* Inner shadow for selected */ +} + +/* Optional: Add a small indicator bar for selected item */ +.projectItem.selected::before { + content: ''; + position: absolute; + left: 0; + top: 5px; + bottom: 5px; + width: 3px; + background-color: var(--accent-color); + border-radius: 0 3px 3px 0; +} + + +.noProjects { + color: var(--text-color-secondary); + font-style: italic; + padding: var(--spacing-sm) var(--spacing-md); +} + +.bottomSection { + margin-top: auto; + padding-top: var(--spacing-md); /* Use variable */ + border-top: 1px solid 
var(--border-color); /* Use variable */ + display: flex; + align-items: center; + justify-content: space-between; + gap: var(--spacing-sm); /* Use variable */ +} + +/* --- Styles for Account Info --- */ +.accountInfoCapsule { + display: flex; + align-items: center; + background-color: transparent; /* Make transparent */ + border: 1px solid var(--border-color); /* Add border */ + border-radius: 20px; + padding: var(--spacing-xs) var(--spacing-sm); /* Adjust padding */ + cursor: default; + transition: var(--transition-fast); + flex-shrink: 1; + min-width: 0; + overflow: hidden; +} + +.accountInfoCapsule:hover { + background-color: var(--light-color); /* Light bg on hover */ + border-color: #bbb; /* Slightly darker border */ +} + +.avatarPlaceholder { + font-size: 1.4em; + color: var(--text-color-secondary); + margin-right: var(--spacing-sm); + flex-shrink: 0; +} + +.usernameDisplay { + font-size: 0.9em; + font-weight: 500; + color: var(--text-color-primary); + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} +/* --- End Account Info Styles --- */ + +.actionIcons { + display: flex; + align-items: center; + gap: var(--spacing-md); /* Increase gap slightly */ + flex-shrink: 0; +} + +.iconLink { + color: var(--text-color-secondary); + font-size: 1.2em; /* Slightly smaller icons */ + text-decoration: none; + transition: var(--transition-fast); + display: flex; + align-items: center; + padding: var(--spacing-xs); /* Add padding for easier click */ + border-radius: 50%; /* Make icon background circular on hover */ +} + +.iconLink:hover { + color: var(--text-color-primary); + background-color: var(--light-color); /* Add background on hover */ +} + +/* --- Logout Button Style --- */ +.logoutButton { + background: none; + border: none; + padding: var(--spacing-xs); /* Match iconLink padding */ + margin: 0; + cursor: pointer; + color: var(--danger-color); /* Use variable */ + font-size: 1.2em; /* Match iconLink size */ + display: flex; + align-items: center; + border-radius: 50%; /* Match iconLink radius */ + transition: var(--transition-fast); +} + +.logoutButton:hover { + color: var(--text-color-light); + background-color: var(--danger-color); /* Red background on hover */ +} +/* --- End Logout Button Style --- */ + +.error { + color: var(--danger-color); + font-size: 0.9em; + padding: 0 var(--spacing-md); /* Add padding */ +} diff --git a/frontend_react/src/components/LoginPage/LoginPage.jsx b/frontend_react/src/components/LoginPage/LoginPage.jsx new file mode 100644 index 0000000..0edacb1 --- /dev/null +++ b/frontend_react/src/components/LoginPage/LoginPage.jsx @@ -0,0 +1,156 @@ +import React, { useState } from 'react'; +import { loginUser } from '../../services/api'; // Import the login API function +import styles from './LoginPage.module.css'; +// Optional: Import an icon library, e.g., react-icons +import { FaEye, FaEyeSlash } from 'react-icons/fa'; // Example using Font Awesome icons + +/** + * LoginPage Component + * + * Provides a form for users to log in using username and password. + * Handles input, submission, API calls, token storage, and error display. 
+ */ +function LoginPage({ onLoginSuccess }) { + const [username, setUsername] = useState(''); + const [password, setPassword] = useState(''); + const [isLoading, setIsLoading] = useState(false); + const [error, setError] = useState(null); + const [showPassword, setShowPassword] = useState(false); + + const handleSubmit = async (event) => { + event.preventDefault(); + setIsLoading(true); + setError(null); + + try { + const response = await loginUser(username, password); + + // --- DEBUGGING LOGS --- + console.log('Login API response received:', response); // Log the entire response object + // --- END DEBUGGING LOGS --- + + if (response && response.token) { // Added check for response object itself + // --- DEBUGGING LOGS --- + console.log('Token found in response, attempting to store:', response.token); + // --- END DEBUGGING LOGS --- + + localStorage.setItem('authToken', response.token); + + // --- DEBUGGING LOGS --- + // Verify immediately after setting + const storedToken = localStorage.getItem('authToken'); + console.log('Token potentially stored. Value in localStorage:', storedToken); + if (storedToken !== response.token) { + console.error("!!! Token mismatch after setting in localStorage !!!"); + } + // --- END DEBUGGING LOGS --- + + + console.log('Login successful, proceeding...'); + if (onLoginSuccess) { + onLoginSuccess(); + } else { + window.location.reload(); + } + } else { + // --- DEBUGGING LOGS --- + console.log('No token found in API response object.'); + // --- END DEBUGGING LOGS --- + setError('Login failed: No token received from server.'); // Updated error message + } + } catch (err) { + setError(err.message || 'Login failed. Please check your credentials.'); + console.error("Login error object:", err); // Log the full error object + } finally { + setIsLoading(false); + } + }; + + return ( +
+
+

Login

+

Access your SurfSmart dashboard

+ +
+ {/* --- Username Input Group --- */} +
+ +
+ setUsername(e.target.value)} + required + placeholder="Enter your username" + disabled={isLoading} + autoComplete="username" + aria-invalid={error ? "true" : "false"} + /> +
+
+ + {/* --- Password Input Group --- */} +
+ +
+ setPassword(e.target.value)} + required + placeholder="Enter your password" + disabled={isLoading} + autoComplete="current-password" + aria-invalid={error ? "true" : "false"} + /> + +
+
+ + {/* --- Error Message Display Area --- */} + {error && ( +
+ {error} +
+ )} + + {/* --- Login Button --- */} + + + {/* --- Optional Links: Register or Forgot Password --- */} + +
+
+
+ ); +} + +export default LoginPage; diff --git a/frontend_react/src/components/LoginPage/LoginPage.module.css b/frontend_react/src/components/LoginPage/LoginPage.module.css new file mode 100644 index 0000000..b3ee9f3 --- /dev/null +++ b/frontend_react/src/components/LoginPage/LoginPage.module.css @@ -0,0 +1,281 @@ +/* components/LoginPage/LoginPage.module.css */ + +/* Define global or component-scoped CSS Variables */ +/* Preferably place these in :root or a global CSS file */ +:global(:root) { /* Use :global if this is module CSS and you want to define global variables */ + --primary-color: #007bff; /* Primary theme color */ + --primary-hover-color: #0056b3; /* Primary hover color */ + --primary-active-color: #004085; /* Primary active color */ + --error-color: #dc3545; /* Error state color */ + --error-background-color: rgba(220, 53, 69, 0.08); /* Error background */ + --error-border-color: rgba(220, 53, 69, 0.2); /* Error border */ + --success-color: #28a745; /* Success state color */ + --input-border-color: #ced4da; /* Input border color */ + --input-focus-border-color: var(--primary-color); /* Input focus border */ + --input-focus-shadow: 0 0 0 3px rgba(0, 123, 255, 0.15); /* Input focus shadow */ + --text-color-primary: #212529; /* Primary text color */ + --text-color-secondary: #6c757d; /* Secondary text color */ + --text-color-button: #ffffff; /* Button text color */ + /* Define colors for the animated gradient */ + --gradient-color-1: #aecbff; + --gradient-color-2: #ff7b7b; + --gradient-color-3: #c8df66; + --gradient-color-4: #0073ff; + --background-card: #ffffff; /* Card background color */ + --font-family-base: system-ui, -apple-system, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, 'Noto Sans', 'Liberation Sans', sans-serif; /* Base font family */ + --border-radius-base: 6px; /* Base border radius */ + --transition-base: all 0.2s ease-in-out; /* Base transition */ +} + +/* Keyframes for the background gradient animation */ +@keyframes gradientShift { + 0% { background-position: 0% 50%; } + 50% { background-position: 100% 50%; } + 100% { background-position: 0% 50%; } +} + +.loginContainer { + display: flex; + justify-content: center; + align-items: center; + min-height: 100vh; + /* Updated: Apply an animated linear gradient background */ + background: linear-gradient(-45deg, var(--gradient-color-1), var(--gradient-color-2), var(--gradient-color-3), var(--gradient-color-4)); + background-size: 400% 400%; /* Make gradient larger than the container */ + animation: gradientShift 15s ease infinite; /* Apply the animation */ + padding: 20px; + font-family: var(--font-family-base); /* Apply base font */ + width: 100%; + box-sizing: border-box; +} + +.loginBox { + background-color: var(--background-card); /* Use variable */ + padding: 40px 35px; /* Slightly adjust padding */ + border-radius: var(--border-radius-base); /* Use variable */ + box-shadow: 0 6px 20px rgba(0, 0, 0, 0.08); /* Softer shadow */ + width: 100%; + max-width: 420px; /* Slightly increase width */ + text-align: center; + box-sizing: border-box; + z-index: 1; /* Ensure login box is above the background */ +} + +/* Optional: Logo styles */ +.logo { + max-width: 150px; + margin-bottom: 25px; +} + +.title { + margin-top: 0; + margin-bottom: 10px; + font-size: 2.2em; /* Increase title font size */ + font-weight: 600; /* Adjust font weight */ + color: var(--text-color-primary); /* Use variable */ +} + +.subtitle { + margin-bottom: 35px; /* Increase bottom margin */ + color: var(--text-color-secondary); /* Use variable */ + 
font-size: 1.05em; /* Adjust font size */ +} + +.inputGroup { + margin-bottom: 22px; /* Adjust margin */ + text-align: left; + position: relative; /* Provide base for absolute positioning inside */ +} + +.inputGroup label { + display: block; + margin-bottom: 8px; /* Adjust margin below label */ + font-weight: 500; + color: var(--text-color-primary); /* Use variable */ + font-size: 0.95em; +} + +/* Wrapper for input and icon/button */ +.inputWrapper { + position: relative; + display: flex; /* For aligning icon and input */ + align-items: center; +} + +/* Optional: Styles for icon next to input */ +.inputIcon { + position: absolute; + left: 12px; + top: 50%; + transform: translateY(-50%); + color: var(--text-color-secondary); + pointer-events: none; /* Prevent icon from interfering with clicks */ + z-index: 1; /* Ensure it's above input background */ +} + +.inputGroup input { + width: 100%; + padding: 12px 15px; /* Base padding */ + /* Increase left padding if using left icon */ + /* padding-left: 40px; */ + /* Increase right padding if using right button (password toggle) */ + padding-right: 45px; + border: 1px solid var(--input-border-color); /* Use variable */ + border-radius: var(--border-radius-base); /* Use variable */ + font-size: 1em; + box-sizing: border-box; + color: var(--text-color-primary); + background-color: var(--background-card); + transition: var(--transition-base); /* Apply base transition */ +} + +.inputGroup input::placeholder { + color: var(--text-color-secondary); + opacity: 0.7; +} + +.inputGroup input:focus { + outline: none; + border-color: var(--input-focus-border-color); /* Use variable */ + box-shadow: var(--input-focus-shadow); /* Use variable */ +} + +/* Style when input is invalid */ +.inputGroup input[aria-invalid="true"] { + border-color: var(--error-color); +} +.inputGroup input[aria-invalid="true"]:focus { + border-color: var(--error-color); + box-shadow: 0 0 0 3px rgba(220, 53, 69, 0.15); /* Focus shadow for error state */ +} + + +/* Password visibility toggle button styles */ +.passwordToggle { + position: absolute; + right: 0px; /* Position to the right */ + top: 0; + height: 100%; /* Same height as input */ + background: transparent; + border: none; + padding: 0 12px; /* Left/right padding */ + cursor: pointer; + color: var(--text-color-secondary); + display: flex; + align-items: center; + justify-content: center; + transition: color 0.2s ease; +} + +.passwordToggle:hover, +.passwordToggle:focus { + color: var(--text-color-primary); + outline: none; /* Remove default outline, rely on parent focus style */ +} + +.passwordToggle:disabled { + cursor: not-allowed; + opacity: 0.5; +} + +/* Error message styles */ +.errorMessage { + color: var(--error-color); /* Use variable */ + background-color: var(--error-background-color); /* Use variable */ + border: 1px solid var(--error-border-color); /* Use variable */ + padding: 10px 15px; + border-radius: var(--border-radius-base); /* Use variable */ + margin-top: 5px; /* Space between error and input */ + margin-bottom: 15px; + font-size: 0.9em; + text-align: left; /* Align error message left */ + display: flex; /* For aligning optional icon */ + align-items: center; + transition: var(--transition-base); /* Apply base transition */ +} + +/* Optional: Error icon styles */ +.errorIcon { + margin-right: 8px; + flex-shrink: 0; /* Prevent icon from shrinking */ +} + +/* Login button styles */ +.loginButton { + width: 100%; + padding: 12px 20px; + background-color: var(--primary-color); /* Use variable */ + color: 
var(--text-color-button); /* Use variable */ + border: none; + border-radius: var(--border-radius-base); /* Use variable */ + font-size: 1.1em; + font-weight: 600; /* Slightly bolder text */ + cursor: pointer; + transition: var(--transition-base); /* Apply base transition */ + margin-top: 15px; /* Space above button */ + position: relative; /* For spinner positioning */ + overflow: hidden; /* Hide overflow if spinner is absolutely positioned */ + display: flex; /* Use flex to center content */ + align-items: center; + justify-content: center; + gap: 8px; /* Space between text and spinner */ +} + +.loginButton:hover { + background-color: var(--primary-hover-color); /* Use variable */ +} + +.loginButton:active { + background-color: var(--primary-active-color); /* Use variable */ + transform: translateY(1px); /* Subtle press effect */ +} + +.loginButton:disabled { + background-color: #cccccc; + cursor: not-allowed; + opacity: 0.65; /* Adjust opacity for disabled state */ +} + +/* Hide text when loading, spinner will be shown */ +.loginButton.loading .buttonText { + /* Optional: uncomment to hide text when loading */ + /* display: none; */ +} + +/* Loading spinner styles */ +.spinner { + display: inline-block; + width: 1em; /* Relative to font size */ + height: 1em; /* Relative to font size */ + border: 2px solid rgba(255, 255, 255, 0.3); /* Lighter border */ + border-radius: 50%; + border-top-color: var(--text-color-button); /* Spinner color */ + animation: spin 1s ease-in-out infinite; + vertical-align: middle; /* Align with text */ +} + +@keyframes spin { + to { transform: rotate(360deg); } +} + +/* Optional links styles */ +.links { + margin-top: 25px; + font-size: 0.9em; + line-height: 1.6; /* Add line-height for better spacing when stacked */ +} + +.links a { + color: var(--primary-color); /* Use variable */ + text-decoration: none; + /* Updated: Make links block elements to stack vertically */ + display: block; + /* Updated: Remove horizontal margin, add vertical margin */ + margin: 8px 0; /* Add some space between stacked links */ + transition: color 0.2s ease; +} + +.links a:hover { + text-decoration: underline; + color: var(--primary-hover-color); /* Use variable */ +} diff --git a/frontend_react/src/components/MainContent/MainContent.jsx b/frontend_react/src/components/MainContent/MainContent.jsx new file mode 100644 index 0000000..f8da75b --- /dev/null +++ b/frontend_react/src/components/MainContent/MainContent.jsx @@ -0,0 +1,291 @@ +// frontend/src/components/MainContent/MainContent.jsx +import React, { useState, useEffect, useRef } from 'react'; // Import useRef +import ProjectHeader from '../ProjectHeader/ProjectHeader.jsx'; +import UrlCardList from '../UrlCardList/UrlCardList.jsx'; +import UrlDetailPage from '../UrlDetailPage/UrlDetailPage.jsx'; +import styles from './MainContent.module.css'; +import { + fetchProjectDetails, + fetchProjectUrls, + addUrlToProject, + askAiAboutProject, + deleteUrlFromProject, + regenerateSummary, + fetchUrlDetails // Import fetchUrlDetails for polling +} from '../../services/api'; +import { FaPlus, FaMagic } from 'react-icons/fa'; + +// --- Constants --- +const POLLING_INTERVAL_MS = 5000; // Check every 5 seconds + +function MainContent({ currentProjectId }) { + const [projectDetails, setProjectDetails] = useState(null); + const [urls, setUrls] = useState([]); + const [isLoading, setIsLoading] = useState(false); + const [error, setError] = useState(null); + const [aiResponse, setAiResponse] = useState(''); + const [detailUrlId, setDetailUrlId] = 
useState(null); + + // --- Polling State --- + // Use useRef to store the interval ID so it doesn't trigger re-renders + const pollIntervalRef = useRef(null); + // --- End Polling State --- + + // Function to update a single URL in the state + const updateSingleUrlState = (updatedUrlData) => { + if (!updatedUrlData || !updatedUrlData.id) return; + setUrls(currentUrls => { + // Create a flag to see if an update actually happened + let updated = false; + const newUrls = currentUrls.map(url => { + if (url.id === updatedUrlData.id) { + // Only update if data has actually changed to avoid infinite loops + // Compare relevant fields like status, title, summary + if (url.processingStatus !== updatedUrlData.processingStatus || + url.title !== updatedUrlData.title || + url.summary !== updatedUrlData.summary) { + updated = true; + return { ...url, ...updatedUrlData, isLoading: false }; // Merge new data + } + } + return url; + }); + // Only set state if an update occurred + return updated ? newUrls : currentUrls; + }); + }; + + + // Effect for initial data load when project changes + useEffect(() => { + setDetailUrlId(null); + // Clear any existing polling interval when project changes + if (pollIntervalRef.current) { + clearInterval(pollIntervalRef.current); + pollIntervalRef.current = null; + } + + if (!currentProjectId) { + setProjectDetails(null); setUrls([]); setIsLoading(false); setError(null); + return; + }; + setIsLoading(true); setError(null); setAiResponse(''); + Promise.all([ fetchProjectDetails(currentProjectId), fetchProjectUrls(currentProjectId) ]) + .then(([details, urlsData]) => { + setProjectDetails(details); setUrls(urlsData || []); setIsLoading(false); + }) + .catch(err => { + console.error("MainContent: Failed to load project data:", err); + // ... (error handling as before) ... 
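+          // A possible refinement (not in the original handler): surface the backend message
+          // when one is available, mirroring the `err.message ||` fallback used in handleAddUrl, e.g.
+          //   setError(err.message || `Failed to load data for project ${currentProjectId}.`);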
+ setError(`Failed to load data for project ${currentProjectId}.`); + setProjectDetails(null); setUrls([]); setIsLoading(false); + }); + + // Cleanup function for when component unmounts or currentProjectId changes + return () => { + if (pollIntervalRef.current) { + clearInterval(pollIntervalRef.current); + pollIntervalRef.current = null; + console.log("Polling interval cleared on project change/unmount."); + } + }; + }, [currentProjectId]); + + + // --- Effect for Polling Pending URLs --- + useEffect(() => { + const pendingUrls = urls.filter(url => url.processingStatus === 'pending'); + + if (pendingUrls.length > 0 && !pollIntervalRef.current) { + // Start polling only if there are pending URLs and polling isn't already running + console.log(`Polling started for ${pendingUrls.length} pending URL(s).`); + pollIntervalRef.current = setInterval(async () => { + console.log("Polling: Checking status of pending URLs..."); + const currentPendingIds = urls + .filter(u => u.processingStatus === 'pending') + .map(u => u.id); + + if (currentPendingIds.length === 0) { + console.log("Polling: No more pending URLs, stopping interval."); + clearInterval(pollIntervalRef.current); + pollIntervalRef.current = null; + return; + } + + // Fetch details for each pending URL + // Using Promise.allSettled to handle individual fetch failures + const results = await Promise.allSettled( + currentPendingIds.map(id => fetchUrlDetails(id)) + ); + + let anyUpdates = false; + results.forEach((result, index) => { + const urlId = currentPendingIds[index]; + if (result.status === 'fulfilled') { + const updatedData = result.value; + // Check if the status is no longer pending + if (updatedData && updatedData.processingStatus !== 'pending') { + console.log(`Polling: URL ${urlId} status updated to ${updatedData.processingStatus}. Updating state.`); + updateSingleUrlState(updatedData); // Update the specific URL in state + anyUpdates = true; + } + } else { + // Handle fetch error for a specific URL during polling + console.error(`Polling: Failed to fetch details for URL ${urlId}:`, result.reason); + // Optionally mark this URL as failed in the state if fetch fails consistently? + // updateSingleUrlState({ id: urlId, processingStatus: 'failed', summary: 'Failed to fetch status.' 
}); + // anyUpdates = true; + } + }); + + // If all polled URLs are now completed/failed, stop the interval early + // Check the main 'urls' state again after potential updates + const stillPending = urls.some(u => u.processingStatus === 'pending'); + if (!stillPending && pollIntervalRef.current) { + console.log("Polling: All polled URLs completed/failed, stopping interval."); + clearInterval(pollIntervalRef.current); + pollIntervalRef.current = null; + } + + + }, POLLING_INTERVAL_MS); + + } else if (pendingUrls.length === 0 && pollIntervalRef.current) { + // Stop polling if no pending URLs remain + console.log("Polling stopped: No pending URLs."); + clearInterval(pollIntervalRef.current); + pollIntervalRef.current = null; + } + + // Cleanup function for this specific effect (when urls state changes) + // This ensures the interval is cleared if the component unmounts while polling + return () => { + if (pollIntervalRef.current) { + clearInterval(pollIntervalRef.current); + // console.log("Polling interval cleared on effect cleanup."); + // Setting ref to null here might cause issues if another effect relies on it immediately + // It's better handled by the main cleanup in the projectId effect + } + }; + }, [urls]); // Re-run this effect whenever the urls state changes + // --- End Polling Effect --- + + + const handleViewUrlDetails = (urlId) => { setDetailUrlId(urlId); }; + const handleBackToList = () => { setDetailUrlId(null); }; + + const handleAddUrl = () => { + // ... (implementation as before, setting initial status to 'pending') ... + if (!currentProjectId) { alert("Please select a project first."); return; } + let newUrl = prompt("Enter the new URL (e.g., https://example.com or example.com):"); + + if (newUrl && newUrl.trim() !== '') { + let processedUrl = newUrl.trim(); + if (!/^(https?:\/\/|\/\/)/i.test(processedUrl)) { + processedUrl = 'https://' + processedUrl; + } + + const placeholderId = `temp-${Date.now()}`; + const placeholderCard = { + id: placeholderId, url: processedUrl, title: '(Processing...)', + summary: '', note: '', keywords: [], starred: false, favicon: null, + processingStatus: 'pending', + }; + setUrls(prevUrls => [placeholderCard, ...prevUrls]); + + addUrlToProject(currentProjectId, processedUrl) + .then(addedUrlData => { + setUrls(prevUrls => prevUrls.map(url => { + if (url.id === placeholderId) { + return { + id: addedUrlData.id, url: processedUrl, title: '', summary: '', + note: '', keywords: [], starred: false, favicon: null, + processingStatus: 'pending', // Set correct initial status + createdAt: new Date().toISOString(), updatedAt: new Date().toISOString() + }; + } else { return url; } + })); + }) + .catch(err => { + console.error("Failed to add URL:", err); + setUrls(prevUrls => prevUrls.filter(url => url.id !== placeholderId)); + alert(`Failed to add URL: ${err.message || 'An unknown error occurred.'}`); + }); + } else if (newUrl !== null) { alert("URL cannot be empty."); } + }; + + const handleAskAi = () => { /* ... */ }; + const handleUrlOrderChange = (newOrder) => { /* ... */ }; + const handleDeleteUrl = (urlIdToDelete) => { /* ... */ }; + const handleRegenerateSummary = (urlIdToRegen) => { + // ... (implementation as before, sets isLoading on the specific card) ... + // This function should now also ensure the status becomes 'pending' + // so the poller can pick it up if needed, or update directly on success. + if (!currentProjectId) return; + setUrls(prevUrls => prevUrls.map(url => + url.id === urlIdToRegen ? 
{ ...url, isLoading: true, processingStatus: 'pending', summary: 'Regenerating...' } : url // Set status to pending + )); + regenerateSummary(urlIdToRegen) + .then(updatedUrlData => { + setUrls(prevUrls => prevUrls.map(url => { + if (url.id === urlIdToRegen) { + // Merge result, ensure isLoading is false + // API returns status 'pending' if queued, or full data on sync completion + if (updatedUrlData.status === 'pending') { + return { ...url, isLoading: false, processingStatus: 'pending', summary: 'Regeneration queued...' }; + } else { + // Assume completion if status isn't pending + return { ...updatedUrlData, id: urlIdToRegen, isLoading: false }; // Ensure ID is correct + } + } + return url; + })); + }) + .catch(err => { + console.error("Failed to regenerate summary:", err); + setUrls(prevUrls => prevUrls.map(url => + // Set status back? Or maybe to failed? Let's mark failed. + url.id === urlIdToRegen ? { ...url, isLoading: false, processingStatus: 'failed', summary: 'Regeneration failed.' } : url + )); + alert(`Regeneration failed: ${err.message}`); + }); + }; + + + // --- Render Logic --- + if (isLoading) return
<div className={styles.loading}>Loading project data...</div>;
+  if (!currentProjectId && !isLoading) return <div className={styles.noProjectSelected}>Select a project from the sidebar to view details.</div>;
+  if (error && !detailUrlId) return <div className={styles.error}>{error}</div>;
+  if (!projectDetails && !isLoading && !error && currentProjectId && !detailUrlId) return <div className={styles.error}>Could not load details for the selected project.</div>;
+
+  return (
+
+ {detailUrlId ? ( + + ) : ( + <> + {projectDetails && ( )} + +
+ {/* ... action buttons ... */} + + +
+ {aiResponse &&
{aiResponse}
} + + )} +
+ ); +} + +export default MainContent; diff --git a/frontend_react/src/components/MainContent/MainContent.module.css b/frontend_react/src/components/MainContent/MainContent.module.css new file mode 100644 index 0000000..922e166 --- /dev/null +++ b/frontend_react/src/components/MainContent/MainContent.module.css @@ -0,0 +1,113 @@ +/* components/MainContent/MainContent.module.css */ +.mainContent { + flex: 1 1 auto; + background-color: transparent; + display: flex; + flex-direction: column; + overflow-y: auto; + position: relative; +} + +.loading, .error, .noProjectSelected { + display: flex; + justify-content: center; + align-items: center; + flex-grow: 1; + font-size: 1.2em; + color: var(--text-color-secondary); /* Use variable */ +} + +.error { + color: var(--danger-color); /* Use variable */ +} + +.actionBar { + margin-top: var(--spacing-lg); + padding-top: var(--spacing-lg); + border-top: 1px solid var(--border-color); /* Use variable */ + display: flex; + justify-content: flex-end; + gap: var(--spacing-md); + padding-bottom: var(--spacing-sm); +} + +.actionButton { + /* Base styles */ + border: none; + padding: 10px 20px; + border-radius: var(--border-radius-md); + cursor: pointer; + font-size: 1em; + font-weight: 500; + display: flex; + align-items: center; + justify-content: center; + gap: var(--spacing-sm); + transition: var(--transition-base); + box-shadow: var(--shadow-sm); +} + +.actionButton:hover { + box-shadow: var(--shadow-md); + transform: translateY(-1px); +} +.actionButton:active { + transform: translateY(0px); + box-shadow: none; +} +.actionButton:disabled { + opacity: 0.65; + cursor: not-allowed; + box-shadow: none; + transform: none; +} + + +/* --- Updated AI Button Styles --- */ +/* Ask AI Button (Assuming it's the first button) */ +.actionButton:first-child { + background-color: var(--ai-background); /* Use light background */ + color: var(--ai-text); /* Use AI text color (dark red) */ + border-color: var(--ai-text); /* Use AI text color for border */ +} +.actionButton:first-child:hover { + background-color: var(--ai-background-hover); /* Very subtle red background */ + color: var(--ai-text-hover); + border-color: var(--ai-text-hover); +} +.actionButton:first-child:active { + background-color: var(--ai-background-activate); /* Slightly darker subtle red background */ + color: var(--ai-text-activate); + border-color: var(--ai-text-activate); +} +/* --- End AI Button Styles --- */ + + +/* Add URL Button (Assuming it's the last button) */ +.actionButton:last-child { + background-color: var(--success-color); /* Use variable */ + color: var(--text-color-light); /* Use variable */ +} +.actionButton:last-child:hover { + background-color: #58a85c; /* Slightly darker success */ +} +.actionButton:last-child:active { + background-color: #4a9b4f; +} + + +.actionIcon { + line-height: 1; +} + +.aiResponseArea { + margin-top: var(--spacing-md); + padding: var(--spacing-md); + background-color: #e9f5ff; /* Keep light blue or use a new variable */ + border: 1px solid #bce8f1; + border-radius: var(--border-radius-sm); + color: #31708f; + white-space: pre-wrap; + font-size: 0.95em; +} + diff --git a/frontend_react/src/components/ProjectHeader/ProjectHeader.jsx b/frontend_react/src/components/ProjectHeader/ProjectHeader.jsx new file mode 100644 index 0000000..b0178d2 --- /dev/null +++ b/frontend_react/src/components/ProjectHeader/ProjectHeader.jsx @@ -0,0 +1,87 @@ +// --- 需要根据后端 projects.py 提供的 API 进行修改 --- +// 1. 
Confirm that the received props (name, description, topic, summary, keywords) match what MainContent passes +// 2. Make sure the WordCloud component correctly handles keywords of the form [{word, percentage}] +// 3. (Optional) Add triggers (buttons) for actions such as editing the project or recalculating keywords +// ---------------------------------------------------- + +import React from 'react'; +import styles from './ProjectHeader.module.css'; + +// Updated WordCloud component to accept keywords prop and map it +const WordCloud = ({ keywords }) => { + // Map backend keywords { word, percentage } to { text, value } if needed by a library + // Or render directly + const wordCloudData = keywords?.map(kw => ({ text: kw.word, value: kw.percentage })) || []; + + if (!wordCloudData || wordCloudData.length === 0) { + return
<div className={styles.wordCloudPlaceholder}>No keyword data available. (Recalculate?)</div>
; + } + + // Simple display for placeholder - Replace with actual word cloud rendering + const maxPercentage = Math.max(...wordCloudData.map(d => d.value), 0) || 100; + + return ( +
+ {wordCloudData.slice(0, 20).map((item, index) => ( // Show top 20 words + + {item.text} + + ))} +
+ ); +}; + + +/** + * ProjectHeader Component + * Displays the project's name, description, topic, summary and keywords. + */ +// Accept more props based on backend response +function ProjectHeader({ name, description, topic, summary, keywords }) { + // TODO: Add handlers for Edit, Recalculate Keywords, Delete Project if buttons are added + // const handleEditClick = () => { ... }; + // const handleRecalcClick = () => { ... call recalculateProjectKeywords API ... }; + + return ( +
+ {/* Left side: Name and Description */} +
+

{name || 'Project Name'}

+ {/* Display Topic if available */} + {topic &&

Topic: {topic}

} +

{description || 'No description provided.'}

+ {/* Display AI Summary if available */} + {summary &&

AI Summary: {summary}

} + {/* TODO: Add Edit button here? */} +
+ + {/* Right side: Global Word Cloud */} +
+

Project Keywords

+ + {/* Optional: Button to trigger recalculation */} + {/* */} +
+ {/* TODO: Add Delete Project button somewhere? Maybe outside this component */} +
+ ); +} + +// Add styles for topic, summary, recalcButton in ProjectHeader.module.css if needed +// styles.css: +// .projectTopic { font-style: italic; color: var(--secondary-color); margin-bottom: 5px; } +// .projectSummary { margin-top: 10px; padding-top: 10px; border-top: 1px dashed var(--border-color); font-size: 0.9em; color: var(--text-color-secondary); } +// .recalcButton { margin-top: 10px; font-size: 0.8em; padding: 4px 8px; } + + +export default ProjectHeader; diff --git a/frontend_react/src/components/ProjectHeader/ProjectHeader.module.css b/frontend_react/src/components/ProjectHeader/ProjectHeader.module.css new file mode 100644 index 0000000..179d8ca --- /dev/null +++ b/frontend_react/src/components/ProjectHeader/ProjectHeader.module.css @@ -0,0 +1,96 @@ +/* components/ProjectHeader/ProjectHeader.module.css */ +.projectHeader { + display: flex; + justify-content: space-between; + align-items: flex-start; + background-color: var(--white-color); /* Use variable */ + padding: var(--spacing-lg); /* Use variable */ + border-radius: var(--border-radius-lg); /* Use variable */ + margin-bottom: var(--spacing-xl); /* Use variable */ + box-shadow: var(--shadow-md); /* Use variable */ + gap: var(--spacing-lg); /* Use variable */ + border: 1px solid var(--border-color); /* Add subtle border */ +} + +.projectInfo { + flex: 1; + min-width: 0; +} + +.projectName { + margin: 0 0 var(--spacing-sm) 0; /* Use variable */ + font-size: 1.8em; + font-weight: 600; /* Bolder */ + color: var(--text-color-primary); /* Use variable */ + line-height: 1.2; +} + +.projectDescription { + margin: 0; + color: var(--text-color-secondary); /* Use variable */ + line-height: 1.6; /* Increase line height */ +} + +.wordCloudContainer { + flex: 0 0 35%; + min-width: 250px; /* Increase min-width slightly */ + /* border-left: 1px solid var(--border-color); */ /* Remove border, use spacing */ + /* padding-left: var(--spacing-lg); */ /* Remove padding, rely on gap */ + background-color: var(--light-color); /* Subtle background for contrast */ + padding: var(--spacing-md); /* Add padding inside the container */ + border-radius: var(--border-radius-md); /* Round corners */ +} + +.wordCloudTitle { + font-size: 1.0em; /* Smaller title */ + font-weight: 600; + color: var(--text-color-secondary); /* Use variable */ + margin-top: 0; + margin-bottom: var(--spacing-md); /* Use variable */ + text-transform: uppercase; /* Uppercase for style */ + letter-spacing: 0.5px; +} + +.wordCloud { + min-height: 100px; + line-height: 1.9; /* Adjust for better spacing */ + text-align: center; + /* Add some visual style */ + filter: saturate(1.1); /* Slightly more vibrant colors */ +} + +.wordCloud span { /* Style individual words */ + cursor: default; /* Indicate non-interactive */ + transition: var(--transition-fast); + color: var(--secondary-color); /* Base color */ +} +/* Optional: Hover effect for words */ +/* .wordCloud span:hover { + color: var(--primary-color); + transform: scale(1.1); +} */ + + +.wordCloudPlaceholder { + color: var(--text-color-secondary); + opacity: 0.7; /* Make placeholder less prominent */ + font-style: italic; + text-align: center; + padding-top: 20px; + font-size: 0.9em; +} + + +/* Responsive adjustments */ +@media (max-width: 900px) { + .projectHeader { + flex-direction: column; + align-items: stretch; + } + .wordCloudContainer { + flex-basis: auto; + margin-top: var(--spacing-lg); + /* border-top: 1px solid var(--border-color); */ /* Remove top border */ + /* padding-top: var(--spacing-lg); */ + } +} diff --git 
a/frontend_react/src/components/UrlCard/UrlCard.jsx b/frontend_react/src/components/UrlCard/UrlCard.jsx new file mode 100644 index 0000000..5cd0f7d --- /dev/null +++ b/frontend_react/src/components/UrlCard/UrlCard.jsx @@ -0,0 +1,164 @@ +// frontend/src/components/UrlCard/UrlCard.jsx +import React from 'react'; +import { useSortable } from '@dnd-kit/sortable'; +import { CSS } from '@dnd-kit/utilities'; +import styles from './UrlCard.module.css'; +import { FaEdit, FaTrashAlt, FaBars, FaSyncAlt, FaSpinner, FaExclamationTriangle, FaTag, FaStar, FaRegStar, FaStickyNote } from 'react-icons/fa'; + +// Simple Keyword Tag component +const KeywordTag = ({ keyword }) => ( + {keyword.word} +); + +/** + * UrlCard Component + * Displays URL info, handles drag-and-drop, and now triggers onViewDetails on click. + */ +function UrlCard({ + id, + url, + title, + summary, + keywords, + processingStatus, + favicon, + starred, + note, + isLoading, + onDelete, + onRegenerate, + onViewDetails, // Accept the new prop + // Add handlers for starring/editing notes if implemented +}) { + + const { + attributes, + listeners, // listeners for drag handle + setNodeRef, + transform, + transition, + isDragging + } = useSortable({ id: id }); + + const style = { + transform: CSS.Transform.toString(transform), + transition, + opacity: isDragging ? 0.8 : 1, + marginBottom: '15px', + position: 'relative', + zIndex: isDragging ? 100 : 'auto', + borderLeft: processingStatus === 'pending' ? '3px solid orange' : (processingStatus === 'failed' ? '3px solid red' : '3px solid transparent'), + }; + + const handleEdit = (e) => { + e.stopPropagation(); // Prevent card click when clicking button + console.log("Edit clicked for:", id); + alert(`Edit Note/Details for URL ID: ${id} (Placeholder)`); + }; + + const handleDelete = (e) => { + e.stopPropagation(); // Prevent card click when clicking button + onDelete(); // Call original delete handler + } + + const handleRegenerate = (e) => { + e.stopPropagation(); // Prevent card click when clicking button + onRegenerate(); // Call original regenerate handler + } + + const handleStarClick = (e) => { + e.stopPropagation(); // Prevent card click when clicking button + // TODO: Implement star toggling logic + API call + console.log("Star clicked for:", id); + alert(`Toggle star for ${id} (Placeholder)`); + } + + // Determine content based on processing status + let cardBody; + if (processingStatus === 'pending') { + cardBody =
<div className={styles.statusInfo}><FaSpinner className={styles.spinnerIconSmall}/> Processing...</div>
; + } else if (processingStatus === 'failed') { + cardBody =
<div className={styles.statusInfo}><FaExclamationTriangle className={styles.errorIcon}/> Processing Failed</div>
; + } else { // completed or undefined + cardBody = ( + <> +

{summary || 'No summary available.'}

+ {keywords && keywords.length > 0 && ( +
+ + {keywords.slice(0, 5).map((kw, index) => )} + {keywords.length > 5 && ...} +
+ )} + {note && ( +
+ + {note} +
+ )} + + ); + } + + // --- Click handler for the main content area --- + const handleCardClick = () => { + // Only navigate if not dragging + if (!isDragging && onViewDetails) { + onViewDetails(); // Call the handler passed from MainContent + } + } + + return ( + // setNodeRef and attributes for dnd-kit Sortable +
+ {isLoading && ( +
+ +
+ )} + + {/* Left side: Buttons */} +
+ {favicon && favicon e.target.style.display='none'}/>} + + +
+ + {/* Center: Main Content - Make this part clickable */} +
{/* Add onClick and pointer */} +
+ {title || 'No Title'} + {/* Make URL link not trigger card click? Optional, but often good UX */} + e.stopPropagation()}> + {url} + +
+ + +
+
+ {cardBody} +
+ + {/* Right side: Drag Handle - Use listeners from useSortable */} +
+ +
+
+ ); +} + +export default UrlCard; diff --git a/frontend_react/src/components/UrlCard/UrlCard.module.css b/frontend_react/src/components/UrlCard/UrlCard.module.css new file mode 100644 index 0000000..6e478ec --- /dev/null +++ b/frontend_react/src/components/UrlCard/UrlCard.module.css @@ -0,0 +1,262 @@ +/* components/UrlCard/UrlCard.module.css */ +.card { + background-color: #ffffff; + border-radius: 8px; + box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1); + /* padding: 15px 20px; */ /* Padding moved to inner columns */ + display: flex; + align-items: stretch; /* Make columns same height */ + /* gap: 15px; */ /* Replaced by padding on columns */ + position: relative; /* For spinner overlay */ + transition: box-shadow 0.2s ease, border-left 0.3s ease; /* Added border transition */ + overflow: hidden; /* Prevent content spillover */ +} + +.card:hover { + box-shadow: 0 3px 6px rgba(0, 0, 0, 0.1); +} + +.card.loading { + /* Opacity handled by spinner overlay now */ + pointer-events: none; /* Prevent interaction while API call is loading */ +} + +.spinnerOverlay { + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: rgba(255, 255, 255, 0.7); + display: flex; + justify-content: center; + align-items: center; + z-index: 10; + border-radius: 8px; /* Match card radius */ +} + +.spinnerIcon { + font-size: 1.5em; + color: #007bff; + animation: spin 1s linear infinite; +} +.spinnerIconSmall { + font-size: 1em; /* Smaller spinner for inline status */ + color: #007bff; + animation: spin 1s linear infinite; + margin-right: 5px; +} + +@keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } +} + +/* --- Column Structure --- */ +.leftColumn { + display: flex; + flex-direction: column; + align-items: center; + justify-content: flex-start; + padding: var(--spacing-md) var(--spacing-sm); + gap: var(--spacing-md); + border-right: 1px solid var(--border-color); /* Use variable */ + flex-shrink: 0; +} + +.cardContent { + flex-grow: 1; /* Takes up most space */ + min-width: 0; /* Prevent overflow */ + padding: 15px; /* Padding */ +} + +.dragHandle { + display: flex; + align-items: center; /* Center icon vertically */ + justify-content: center; + cursor: grab; + color: #adb5bd; /* Light color for handle */ + padding: 15px 10px; /* Padding */ + border-left: 1px solid #eee; /* Separator line */ + flex-shrink: 0; /* Prevent shrinking */ +} +/* --- End Column Structure --- */ + + +.cardHeader { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 10px; + gap: 10px; /* Space between header elements */ + flex-wrap: wrap; /* Allow wrapping on smaller widths within card */ +} + +.cardTitle { /* New style for title */ + font-weight: 600; /* Make title slightly bolder */ + color: #333; + margin-right: auto; /* Push URL and actions to the right */ + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; +} + + +.urlLink { + color: #007bff; + text-decoration: none; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; /* Truncate long URLs */ + flex-shrink: 1; /* Allow URL to shrink */ + min-width: 50px; /* Prevent URL from becoming too small */ + margin-left: 10px; /* Space from title */ +} + +.urlLink:hover { + text-decoration: underline; +} + +.cardActions { + display: flex; + align-items: center; + gap: 8px; /* Space between action icons */ + flex-shrink: 0; /* Prevent actions from shrinking */ +} + +.iconButton { + background: none; + border: none; + color: var(--text-color-secondary); /* Use 
variable */ + cursor: pointer; + padding: 5px; + font-size: 0.95em; + transition: var(--transition-fast); /* Use variable */ + line-height: 1; + border-radius: 50%; +} +.iconButton:hover { + color: var(--text-color-primary); /* Use variable */ + background-color: var(--light-color); /* Use variable */ +} +.iconButton:disabled { + color: #adb5bd; + cursor: not-allowed; + background-color: transparent !important; +} + +/* --- Specific AI Regen Button Style --- */ +/* Target the regen button specifically if possible, otherwise rely on its position/icon */ +/* Assuming it's the last button in leftColumn for now */ +.leftColumn > .iconButton:last-child { /* Example selector */ + color: var(--ai-text); /* Use AI text color */ +} +.leftColumn > .iconButton:last-child:hover { + color: var(--ai-text-hover); /* Use AI hover text color */ + background-color: rgba(155, 53, 53, 0.1); /* Subtle reddish background on hover */ +} +.leftColumn > .iconButton:last-child:disabled { + color: #c7a9a9; /* Muted red when disabled */ + background-color: transparent !important; +} +/* --- End AI Regen Button Style --- */ + +.deleteButton:hover { + color: #dc3545; /* Red for delete */ +} + +.starButton { + color: #ffc107; /* Yellow for stars */ +} +.starButton:hover { + color: #e0a800; +} + + +.summary { + color: #555; + font-size: 0.95em; + line-height: 1.5; + margin: 0 0 10px 0; /* Add bottom margin */ +} + +.dragHandle:active { + cursor: grabbing; +} + +/* --- New Styles for Status, Keywords, Favicon, Note, Star --- */ +.statusInfo { + display: flex; + align-items: center; + justify-content: center; + padding: 20px; + font-style: italic; + color: #666; + background-color: #f8f9fa; + border-radius: 4px; + min-height: 50px; /* Give it some height */ +} + +.errorIcon { + color: #dc3545; /* Red for error */ + margin-right: 5px; +} + +.keywordsContainer { + margin-top: 10px; + display: flex; + align-items: center; + flex-wrap: wrap; + gap: 5px; +} + +.keywordIcon { + color: #6c757d; + margin-right: 5px; + font-size: 0.9em; +} + +.keywordTag { + background-color: #e9ecef; + color: #495057; + padding: 2px 6px; + border-radius: 10px; /* Pill shape */ + font-size: 0.8em; + white-space: nowrap; +} + +.moreKeywords { + font-size: 0.8em; + color: #6c757d; +} + +.favicon { + width: 16px; + height: 16px; + object-fit: contain; + /* margin-right: 8px; */ /* Spacing handled by leftColumn gap */ +} + +.noteContainer { + margin-top: 10px; + padding-top: 10px; + border-top: 1px dashed #eee; + font-size: 0.9em; + color: #666; + display: flex; + align-items: flex-start; + gap: 5px; +} + +.noteIcon { + color: #6c757d; + margin-top: 2px; /* Align icon nicely */ + flex-shrink: 0; +} +.noteText { + white-space: pre-wrap; /* Respect line breaks in notes */ +} + + +/* --- End New Styles --- */ + diff --git a/frontend_react/src/components/UrlCardList/UrlCardList.jsx b/frontend_react/src/components/UrlCardList/UrlCardList.jsx new file mode 100644 index 0000000..be2f90d --- /dev/null +++ b/frontend_react/src/components/UrlCardList/UrlCardList.jsx @@ -0,0 +1,92 @@ +// frontend/src/components/UrlCardList/UrlCardList.jsx +import React from 'react'; +import { + DndContext, + closestCenter, + KeyboardSensor, + PointerSensor, + useSensor, + useSensors, +} from '@dnd-kit/core'; +import { + arrayMove, + SortableContext, + sortableKeyboardCoordinates, + verticalListSortingStrategy, +} from '@dnd-kit/sortable'; + +import UrlCard from '../UrlCard/UrlCard.jsx'; +import styles from './UrlCardList.module.css'; + +/** + * UrlCardList Component + * Renders 
the list and handles drag-and-drop. + * Now accepts and passes down onViewDetails prop. + */ +// Accept onViewDetails prop +function UrlCardList({ urls, onOrderChange, onDelete, onRegenerate, onViewDetails }) { + + const sensors = useSensors( + useSensor(PointerSensor), + useSensor(KeyboardSensor, { + coordinateGetter: sortableKeyboardCoordinates, + }) + ); + + const handleDragEnd = (event) => { + const { active, over } = event; + if (over && active.id !== over.id) { + const oldIndex = urls.findIndex((url) => url.id === active.id); + const newIndex = urls.findIndex((url) => url.id === over.id); + if (oldIndex === -1 || newIndex === -1) { + console.error("Could not find dragged item index"); + return; + } + const newOrder = arrayMove(urls, oldIndex, newIndex); + onOrderChange(newOrder); + } + }; + + if (!urls || urls.length === 0) { + return
No URLs added to this project yet. Start by adding one below!
; + } + + const urlIds = urls.map(url => url.id); + + return ( + + +
+ {urls.map((url) => ( + onDelete(url.id)} + onRegenerate={() => onRegenerate(url.id)} + onViewDetails={() => onViewDetails(url.id)} // Pass onViewDetails down + /> + ))} +
+
+
+ ); +} + +export default UrlCardList; + diff --git a/frontend_react/src/components/UrlCardList/UrlCardList.module.css b/frontend_react/src/components/UrlCardList/UrlCardList.module.css new file mode 100644 index 0000000..038cf19 --- /dev/null +++ b/frontend_react/src/components/UrlCardList/UrlCardList.module.css @@ -0,0 +1,16 @@ +/* components/UrlCardList/UrlCardList.module.css */ +.urlCardList { + /* Container for the list */ + margin-top: 20px; /* Space below header */ + flex-grow: 1; /* Allows list to take available space if MainContent is flex */ +} + +.emptyList { + text-align: center; + color: #888; + font-style: italic; + padding: 40px 20px; + border: 2px dashed #e0e0e0; + border-radius: 8px; + background-color: #fafafa; +} diff --git a/frontend_react/src/components/UrlDetailPage/UrlDetailPage.jsx b/frontend_react/src/components/UrlDetailPage/UrlDetailPage.jsx new file mode 100644 index 0000000..56e61e0 --- /dev/null +++ b/frontend_react/src/components/UrlDetailPage/UrlDetailPage.jsx @@ -0,0 +1,308 @@ +import React, { useState, useEffect, useCallback } from 'react'; +import { fetchUrlDetails, updateUrlDetails } from '../../services/api'; // Import API functions +import styles from './UrlDetailPage.module.css'; // We'll create this CSS module next +import { FaLink, FaStar, FaRegStar, FaStickyNote, FaTags, FaInfoCircle, FaSpinner, FaExclamationTriangle, FaCalendarAlt, FaSave, FaTimes, FaEdit, FaCheckCircle } from 'react-icons/fa'; // Import icons + +// Helper to format date strings +const formatDate = (dateString) => { + if (!dateString) return 'N/A'; + try { + // Assuming dateString is ISO 8601 UTC (ends with Z) + return new Date(dateString).toLocaleString(undefined, { + year: 'numeric', month: 'short', day: 'numeric', + hour: '2-digit', minute: '2-digit' + }); + } catch (e) { + return dateString; // Return original if formatting fails + } +}; + +// Simple Keyword Tag component (can be shared or kept local) +const KeywordTag = ({ keyword }) => ( + + {keyword.word} + +); + +/** + * UrlDetailPage Component + * Fetches, displays, and allows editing of URL details. + * Expects `urlId` prop and an `onBack` function prop to navigate back. 
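+ *
+ * A minimal usage sketch (MainContent above keeps the selected id in `detailUrlId` state;
+ * the exact JSX it renders is an assumption here):
+ *   <UrlDetailPage urlId={detailUrlId} onBack={() => setDetailUrlId(null)} />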
+ */ +function UrlDetailPage({ urlId, onBack }) { + const [urlData, setUrlData] = useState(null); // Original fetched data + const [isLoading, setIsLoading] = useState(true); + const [error, setError] = useState(null); + const [isSaving, setIsSaving] = useState(false); // State for save operation + const [saveSuccess, setSaveSuccess] = useState(false); // State for success message + + // --- State for Editing --- + const [isEditing, setIsEditing] = useState(false); + const [editedTitle, setEditedTitle] = useState(''); + const [editedSummary, setEditedSummary] = useState(''); + const [editedNote, setEditedNote] = useState(''); + // --- End Editing State --- + + // Fetch data when urlId changes + const loadUrlData = useCallback(() => { + if (!urlId) { + setError("No URL ID provided."); + setIsLoading(false); + setUrlData(null); + return; + } + console.log(`UrlDetailPage: Fetching details for URL ID: ${urlId}`); + setIsLoading(true); + setError(null); + setUrlData(null); + setIsEditing(false); + + fetchUrlDetails(urlId) + .then(data => { + setUrlData(data); + // Initialize edit state when data loads (also ensures reset if data reloads) + setEditedTitle(data?.title || ''); + setEditedSummary(data?.summary || ''); + setEditedNote(data?.note || ''); + setIsLoading(false); + }) + .catch(err => { + console.error(`UrlDetailPage: Failed to fetch URL details for ${urlId}:`, err); + setError(err.message || "Failed to load URL details."); + setIsLoading(false); + }); + }, [urlId]); + + useEffect(() => { + loadUrlData(); + }, [loadUrlData]); + + // --- Edit Mode Handlers --- + const handleEdit = () => { + if (!urlData) return; + // Re-initialize edit fields with current data when entering edit mode + setEditedTitle(urlData.title || ''); + setEditedSummary(urlData.summary || ''); + setEditedNote(urlData.note || ''); + setIsEditing(true); + setSaveSuccess(false); + setError(null); + }; + + const handleCancel = () => { + setIsEditing(false); + setError(null); + // No need to reset fields explicitly, they will be re-initialized + // from urlData next time edit is clicked. 
+ }; + + const handleSave = async () => { + if (!urlData) return; + setIsSaving(true); + setError(null); + setSaveSuccess(false); + + const updateData = { + title: editedTitle, + summary: editedSummary, + note: editedNote, + }; + + try { + // Pass only changed data (optional optimization, backend handles it) + const changedData = {}; + if (editedTitle !== urlData.title) changedData.title = editedTitle; + if (editedSummary !== urlData.summary) changedData.summary = editedSummary; + if (editedNote !== urlData.note) changedData.note = editedNote; + + if (Object.keys(changedData).length === 0) { + console.log("No changes detected, exiting edit mode."); + setIsEditing(false); + setIsSaving(false); + return; // No need to call API if nothing changed + } + + + const updatedUrl = await updateUrlDetails(urlId, changedData); // Send only changed data + // Update local state with the response from the API OR merge changes + // Merging changes locally might be smoother if API doesn't return full object + setUrlData(prevData => ({ + ...prevData, + ...changedData // Apply local changes directly + // Alternatively, if API returns full updated object: ...updatedUrl + })); + setIsEditing(false); + setSaveSuccess(true); + setTimeout(() => setSaveSuccess(false), 2500); + } catch (err) { + console.error("UrlDetailPage: Failed to save URL details:", err); + setError(err.message || "Failed to save changes."); + } finally { + setIsSaving(false); + } + }; + // --- End Edit Mode Handlers --- + + // --- Star Toggle Handler --- + const handleToggleStar = async () => { + if (!urlData || isSaving || isEditing) return; + + const newStarredStatus = !urlData.starred; + const originalStatus = urlData.starred; + setUrlData(prevData => ({ ...prevData, starred: newStarredStatus })); + + try { + await updateUrlDetails(urlId, { starred: newStarredStatus }); + } catch (err) { + console.error("UrlDetailPage: Failed to update star status:", err); + setUrlData(prevData => ({ ...prevData, starred: originalStatus })); + alert(`Failed to update star status: ${err.message}`); + } + }; + // --- End Star Toggle Handler --- + + // --- Render states --- + if (isLoading) { + return
Loading URL Details...
; + } + if (error && !isEditing) { + return ( +
+ {error} + {onBack && } +
+ ); + } + if (!urlData && !isLoading) { + return
URL data not available.
; + } + + // --- Render URL Details --- + return ( +
+ {onBack && ( // Always show back button? + + )} + + {/* Header: Title, Favicon, Star, Edit/Save/Cancel */} +
+ {urlData.favicon && favicon e.target.style.display='none'}/>} + + {/* Editable Title */} + {isEditing ? ( + setEditedTitle(e.target.value)} + className={`${styles.titleInput} ${styles.inputField}`} + disabled={isSaving} + aria-label="URL Title" + /> + ) : ( +

{urlData.title || 'No Title'}

+ )} + + + +
+ {isEditing ? ( + <> + + + + ) : ( + + )} +
+
+ + {/* Display Save Error/Success Messages */} + {error && isEditing &&

{error}

} + {saveSuccess &&

Saved successfully!

} + + + + {urlData.url} + + + {/* Metadata Section */} +
+ + Status: {urlData.processingStatus} + + + Updated: {formatDate(urlData.updatedAt)} + + + Added: {formatDate(urlData.createdAt)} + +
+ + {/* Summary Section (Editable) */} +
+

Summary

+ {isEditing ? ( +