diff --git a/CTFd_github_backup/README.md b/CTFd_github_backup/README.md new file mode 100644 index 0000000..a48ef02 --- /dev/null +++ b/CTFd_github_backup/README.md @@ -0,0 +1,149 @@ +# Github backup +Import challenges from Github to CTFd + +## 1. Create the GitHub App on GitHub +From Settings -> Developer Settings -> Github App -> New Github App. + +Complete the form with the application details: +- Name of the Github App +- Description +- Homepage URL: URL of the deployed platform (or localhost) +- Setup URL: URL of the application to which the user will be redirected after completing the installation. For example: `http://localhost:4000/admin/plugins/github_backup` +- Uncheck webhooks +- In the permissions section on repositories: + - Contents -> Read-only + - Metadata -> Read-only +- Choose that the Github App can only be installed on our Github account. + +Once created, you must modify it and create the Private Key. This private key must be downloaded and its content saved in the plugin's `config.py` file. +You must also indicate the application ID and the installation URL of the Github App in the `config.py` file. + +**Note:** Write the private key on a single line, replacing each line break in the original format with `\n`. + +Example of `config.py`: +```python +config = { + "GITHUB_APP_ID": 0000000, + "GITHUB_APP_INSTALLATION_URL": "https://github.com/apps/github-app-name/installations/new", + "GITHUB_APP_PRIVATE_KEY": "-----BEGIN RSA PRIVATE KEY-----\nexample\n-----END RSA PRIVATE KEY-----" +} +``` + +## 2. Install the GitHub App on our GitHub account +From the CTFd application, go to the application's administration panel and select Plugins -> Github backup. + +The first step is to click on the “1. Install Github App” button. + +You can only have one active installation per Github App. + +In the window, you can choose whether to give access permission to all repositories or only to some of them. 
+ +After completing the installation, you will be redirected back to the CTFd platform. + +The second step is to click on the “2. Link installation” button to save the installation token in the database. + +## 3. Import challenges +Once the installation is complete, a list of repositories that the GitHub App has access to will be displayed. + +Select the ones you want to save and click the “Save selected repositories” button. + +The “Saved Repositories” table will show the repositories that have been saved. It also shows the date of the last import and the “Import” (or ‘Update’ if a first import has already been made from that repository) and “Delete” buttons. +Deleting a repository from the table does not delete the challenges that have been imported from that repository. The challenges will remain as not imported from GitHub. + +You can choose what to do with challenges imported from GitHub that have been deleted from the repository when you perform an “Update.” You can keep them (they will remain as not imported from GitHub) or delete them. + +To import from several repositories at once, check the checkboxes for the repositories and click the “Import selected repositories” button. + +You can download a sample JSON file from the “Download an example” button to see the structure that each challenge should have. + +Example of JSON schema: +```json +{ + "challenge": { + "uuid": "000000000000000", + "name": "knock, knock, Neo", + "description": "Wake up. 
The matrix has you...", + "attribution": "author", + "connection_info": "https://link.com", + "max_attempts": 3, + "value": 50, + "category": "web", + "type": "standard", + "state": "visible or hidden", + "flags": [ + { + "uuid": "000000000000000", + "type": "static", + "content": "flag{answer}", + "data": "case_insensitive" + }, + { + "uuid": "000000000000000", + "type": "regex", + "content": "flag{.a*}", + "data": "" + } + ], + "tags": [ + "tag1", + "tag2" + ], + "hints": [ + { + "uuid": "000000000000000", + "title": "Hint 1", + "type": "standard", + "content": "Follow the white rabbit...", + "cost": 10 + }, + { + "uuid": "000000000000000", + "title": "Hint 2", + "type": "?", + "content": "?", + "cost": 20 + } + ] + } +} + +``` + +**Fields and permitted values** +Required fields are marked with an `*`. + +For each challenge: +- uuid * +- name * +- description * +- attribution +- connection_info +- max_attempts +- value * +- category +- type *: standard (dynamic type not implemented yet) +- state *: visible or hidden + +Tags must be text strings. + +The fields for each flag are: +- uuid * +- type *: static or regex +- content * +- data: case_insensitive or empty (for case sensitive) + +The fields for each hint are: +- uuid * +- title * +- type: standard * +- content * +- cost * + + +## 4. Export challenges +The last section of the page shows a table with all the challenges on the platform. This includes those that have been imported from the Github App and those that have been created from the platform. +Challenges can be exported individually or in groups. Individually exported challenges are downloaded as JSON files. Group exports download a zip file with one JSON file for each challenge. + +For challenges that have not been imported from the Github App, a UUID will be generated in the necessary fields. 
# plugins/github_backup/__init__.py
from pathlib import Path

from jinja2 import ChoiceLoader, FileSystemLoader

from CTFd.plugins import register_plugin_assets_directory
from CTFd.plugins.github_backup.models import (
    UserGitHubToken,
    GithubRepositories,
    GithubChallengeSync,
    GithubFlagSync,
    GithubHintSync,
)
from .blueprints import my_bp


def load(app):
    """Plugin entry point — CTFd calls this once at startup.

    Registers the plugin blueprint, creates the plugin's tables, gives the
    plugin's templates priority over the core ones and exposes the static
    assets directory.
    """
    # Routes of the plugin.
    app.register_blueprint(my_bp)

    # Create the plugin's tables if they do not exist yet.
    from CTFd.models import db
    db.create_all()

    # Template loader for the plugin, inserted with highest priority so the
    # plugin templates shadow same-named core templates.
    plugin_loader = FileSystemLoader(str(Path(__file__).parent / "templates"))
    if isinstance(app.jinja_loader, ChoiceLoader):
        app.jinja_loader.loaders.insert(0, plugin_loader)
    else:
        # The core ships a plain FileSystemLoader — wrap both in a ChoiceLoader.
        app.jinja_loader = ChoiceLoader([plugin_loader, app.jinja_loader])

    # Static assets of the plugin.
    register_plugin_assets_directory(app, base_path="/plugins/github_backup/assets/")
from CTFd.models import Challenges
from CTFd.utils.decorators import admins_only
from CTFd.utils.user import get_current_user

from CTFd.plugins.github_backup.models import (
    db,
    GithubRepositories,
    GithubChallengeSync,
    GithubFlagSync,
    GithubHintSync,
    UserGitHubToken,
)
from CTFd.plugins.github_backup.expot_data import is_imported_from_github, prepare_json
from CTFd.plugins.github_backup.import_data import import_challenges_from_repo
from CTFd.plugins.github_backup.config import config

from flask import Blueprint, render_template, request, Response, send_file, redirect

from datetime import datetime
import requests
import io
import zipfile
import time
import jwt
import json
import pytz

my_bp = Blueprint(
    "github_backup",
    __name__,
    template_folder="templates",
    static_folder="static",
    static_url_path="/plugins/github_backup/static",
    url_prefix="",
)


@my_bp.route("/admin/plugins/github_backup")
@admins_only
def get_template():
    """Render the GitHub Backup plugin admin page."""
    installation_url = config["GITHUB_APP_INSTALLATION_URL"]
    return render_template(
        "admin/plugins/github_backup.html", installation_url=installation_url
    )


def generate_jwt():
    """Build a short-lived JWT that authenticates as the GitHub App.

    Signed RS256 with the App private key; ``iss`` is the App id. GitHub
    caps App JWT lifetime at 10 minutes, hence ``exp = iat + 600``.
    """
    now = int(time.time())
    payload = {
        "iat": now,
        "exp": now + 600,
        "iss": config["GITHUB_APP_ID"],
    }
    return jwt.encode(payload, config["GITHUB_APP_PRIVATE_KEY"], algorithm="RS256")


def get_installation_access_token(installation_id):
    """Exchange the App JWT for an installation access token.

    Returns the token string, or ``None`` when GitHub does not answer 201.
    """
    headers = {
        "Authorization": f"Bearer {generate_jwt()}",
        "Accept": "application/vnd.github+json",
    }
    url = f"https://api.github.com/app/installations/{installation_id}/access_tokens"
    r = requests.post(url, headers=headers)

    if r.status_code != 201:
        print("Error:", r.status_code, r.text)
        return None

    return r.json().get("token")


@my_bp.route("/plugins/github_backup/installations", methods=["GET"])
@admins_only
def link_installation():
    """Link the (single) GitHub App installation to the current user.

    Queries the App's installations through the GitHub API and stores the
    installation id in ``UserGitHubToken`` for the logged-in admin. Fails
    with 400 when there is not exactly one installation.
    """
    headers = {
        "Authorization": f"Bearer {generate_jwt()}",
        "Accept": "application/vnd.github+json",
    }
    r = requests.get("https://api.github.com/app/installations", headers=headers)

    if r.status_code != 200:
        return {"success": False, "message": "Error retrieving installations"}, 400

    installations = r.json()
    if not isinstance(installations, list):
        return {"success": False, "message": "Unexpected response"}, 400

    # The plugin supports exactly one active installation of the App.
    if len(installations) != 1:
        # Fix: do not echo the raw GitHub API payload back to the client
        # (it leaks installation account metadata and bloats the response).
        return {"success": False, "message": "Multiple installations."}, 400
    installation_id = installations[0]["id"]

    user_id = get_current_user().id
    token_entry = UserGitHubToken.query.filter_by(user_id=user_id).first()

    if token_entry:
        # NOTE(review): the column is named ``token`` but stores the
        # installation id — confirm against the model definition.
        token_entry.token = installation_id
    else:
        token_entry = UserGitHubToken(user_id=user_id, token=installation_id)
        db.session.add(token_entry)

    db.session.commit()

    # Fix: plain string (was an f-string with no placeholders).
    return {"success": True, "message": "Installation ID saved correctly."}
+ """ + user_id = get_current_user().id + token_entry = UserGitHubToken.query.filter_by(user_id=user_id).first() + + if not token_entry: + return {"success": False, "message": "Installation ID not found"}, 401 + + installation_id = get_installation_access_token(token_entry.token) + + if not installation_id: + return {"success": False, "message": "Could not obtain installation ID"}, 400 + + headers = { + "Authorization": f"token {installation_id}", + "Accept": "application/vnd.github+json" + } + + github_api_url = "https://api.github.com/installation/repositories" + all_repos = [] + page = 1 + + while True: + response = requests.get( + f"{github_api_url}?per_page=100&page={page}", + headers=headers + ) + + if response.status_code != 200: + return { + "success": False, + "message": "Could not retrieve repositories", + "details": response.json() + }, 400 + + repos_page = response.json().get("repositories", []) + all_repos.extend(repos_page) + + if "next" not in response.links: + break + + page += 1 + + repo_names = [ + {"id": r["id"], "name": r["name"], "full_name": r["full_name"]} + for r in all_repos + ] + + return {"success": True, "repos": repo_names} + + +@my_bp.route("/plugins/github_backup/repos/selection", methods=["POST"]) +@admins_only +def save_selected_repos(): + """ + Processes a POST request containing a list of repositories and + saves them if they are not already stored for the user in the database. Each + repository is stored with its associated details if it does not already + exist. All changes are committed to the database after processing. 
+ """ + data = request.get_json() + selected_repos = data.get("repos", []) + user = get_current_user() + + if not isinstance(selected_repos, list): + return {"success": False, "message": "Invalid data format"}, 400 + + for repo in selected_repos: + existing = GithubRepositories.query.filter_by( + user_id=user.id, + github_repo_id=repo["id"] + ).first() + + if not existing: + new_repo = GithubRepositories( + user_id=user.id, + github_repo_id=repo["id"], + name=repo["name"], + full_name=repo["full_name"], + selected=False, + last_synced_at=None + ) + db.session.add(new_repo) + + db.session.commit() + + return {"success": True, "message": "Repositories saved correctly"} + + +@my_bp.route("/plugins/github_backup/repos/saved", methods=["GET"]) +@admins_only +def list_saved_repos(): + """ + This function retrieves all GitHub repositories that are saved by the currently + authenticated user. + """ + user_id = get_current_user().id + saved_repos = GithubRepositories.query.filter_by(user_id=user_id).all() + + result = [] + for repo in saved_repos: + result.append({ + "id": repo.id, + "name": repo.name, + "full_name": repo.full_name, + "selected": repo.selected, + "last_synced_at": repo.last_synced_at.isoformat() if repo.last_synced_at else None, + }) + + return {"success": True, "repos": result} + + +@my_bp.route("/plugins/github_backup/repos/", methods=["DELETE"]) +@admins_only +def delete_repo(repo_id): + """ + Deletes a GitHub repository along with its related data stored in the database. 
+ """ + user_id = get_current_user().id + repo = GithubRepositories.query.filter_by(id=repo_id, user_id=user_id).first() + + if not repo: + return {"success": False, "message": "Repository not found"}, 404 + + # Get and delete challenge syncs + challenge_syncs = GithubChallengeSync.query.filter_by(github_repo_id=repo.id).all() + for sync in challenge_syncs: + db.session.delete(sync) + + # Get and delete flag syncs + flag_syncs = GithubFlagSync.query.filter_by(github_repo_id=repo.id).all() + for sync in flag_syncs: + db.session.delete(sync) + + # Delete hint syncs + hint_syncs = GithubHintSync.query.filter_by(github_repo_id=repo.id).all() + for sync in hint_syncs: + db.session.delete(sync) + + db.session.delete(repo) + db.session.commit() + + return {"success": True, "message": "Repository and related data deleted correctly."} + + +@my_bp.route("/plugins/github_backup/repos//import", methods=["POST"]) +@admins_only +def import_from_repo(repo_id): + """ + Handles importing challenges from a specified GitHub repository using a user's GitHub token. + The function imports challenges, optionally deletes existing challenges based on the delete mode, + and updates the repository synchronization time. 
+ """ + data = request.get_json() + delete_mode = data.get("delete_mode") + + try: + user_id = get_current_user().id + repo = GithubRepositories.query.filter_by(id=repo_id, user_id=user_id).first() + if not repo: + return {"success": False, "message": "Repository not found"}, 404 + + # Get token + token_entry = UserGitHubToken.query.filter_by(user_id=user_id).first() + if not token_entry: + return {"success": False, "message": "No GitHub token configured"}, 400 + access_token = get_installation_access_token(token_entry.token) + + result = import_challenges_from_repo(repo, access_token, overwrite_existing=True, delete_mode=delete_mode) + + repo.selected = True + repo.last_synced_at = datetime.now().astimezone(pytz.utc) + db.session.commit() + + return { + "success": result["success"], + "message": f"{result['created']} challenges imported, {result['updated']} challenges updated, {result['skipped']} already existing, {result['removed']} deleted", + "errors": result["errors"] + } + except Exception as e: + return {"success": False, "message": e}, 500 + + +@my_bp.route("/plugins/github_backup/challenge//download", methods=["GET"]) +@admins_only +def download_challenge(challenge_id: int) -> tuple[dict[str, bool | str], int] | Response: + """ + Handles the downloading of a specific challenge data in JSON format. The endpoint generates + a JSON file for the provided challenge ID and sends it as a downloadable attachment. The + JSON content is obtained from a helper function and formatted with UTF-8 encoding. 
+ """ + + try: + data, name = prepare_json(challenge_id) + + json_bytes = json.dumps(data, indent=4, ensure_ascii=False).encode("utf-8-sig") + + return Response( + json_bytes, + mimetype="application/json", + headers={ + "Content-Disposition": f'attachment; filename="challenge_{name}.json"' + }, + ) + except ValueError as e: + return {"success": False, "message": str(e)}, 400 + except Exception as e: + return {"success": False, "message": f"Unexpected error: {str(e)}"}, 500 + + +@my_bp.route("/plugins/github_backup/challenges/download/example", methods=["GET"]) +@admins_only +def download_example_json(): + """ + Handles the download of an example JSON file for a challenge configuration. + """ + example = { + "challenge": { + "uuid": "000000000000000", + "name": "knock, knock, Neo", + "description": "Wake up. The matrix has you...", + "attribution": "author", + "connection_info": "https://link.com", + "max_attempts": 3, + "value": 50, + "category": "web", + "type": "standard", + "state": "visibe or hidden", + "flags": [ + { + "uuid": "000000000000000", + "type": "static", + "content": "flag{answer}", + "data": "case_insensitive", + }, + { + "uuid": "000000000000000", + "type": "regex", + "content": "flag{.a*}", + "data": "", + } + ], + "tags": [ + "tag1", "tag2" + ], + "hints": [ + { + "uuid": "000000000000000", + "title": "Hint 1", + "type": "standard", + "content": "Follow the white rabbit...", + "cost": 10 + }, + { + "uuid": "000000000000000", + "title": "Hint 2", + "type": "?", + "content": "?", + "cost": 20 + } + ], + } + } + + json_bytes = json.dumps(example, indent=4, ensure_ascii=False).encode("utf-8-sig") + + return Response( + json_bytes, + mimetype="application/json", + headers={ + "Content-Disposition": f'attachment; filename="challenge_example.json"' + }, + ) + + +@my_bp.route("/plugins/github_backup/challenges", methods=["GET"]) +@admins_only +def get_challenges(): + """ + Fetches a list of all challenges and their import status from GitHub. 
+ """ + challenges = Challenges.query.all() + + data = [] + for challenge in challenges: + is_imported = is_imported_from_github(challenge.id) + data.append({ + "id": challenge.id, + "name": challenge.name, + "imported": is_imported, + }) + + return {"success": True, "challenges": data} + + +@my_bp.route("/plugins/github_backup/challenges/download", methods=["POST"]) +@admins_only +def download_multiple_challenges(): + """ + Handles a POST request to download multiple challenges as a ZIP archive. + """ + try: + challenge_ids = request.json.get("challenge_ids", []) + if not challenge_ids: + return {"success": False, "message": "No challenge IDs provided"}, 400 + + # Creamos un buffer en memoria + zip_buffer = io.BytesIO() + with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file: + for cid in challenge_ids: + try: + data, name = prepare_json(int(cid)) + json_bytes = json.dumps(data, indent=4, ensure_ascii=False).encode("utf-8-sig") + zip_file.writestr(f"challenge_{name}.json", json_bytes) + except Exception as e: + zip_file.writestr(f"challenge_{cid}_error.txt", str(e)) + + zip_buffer.seek(0) + + return send_file( + zip_buffer, + mimetype="application/zip", + as_attachment=True, + download_name="challenges_export.zip", + ) + + + except Exception as e: + return {"success": False, "message": f"Unexpected error: {str(e)}"}, 500 \ No newline at end of file diff --git a/CTFd_github_backup/config.json b/CTFd_github_backup/config.json new file mode 100644 index 0000000..cc6828d --- /dev/null +++ b/CTFd_github_backup/config.json @@ -0,0 +1,4 @@ +{ + "name": "Github backup", + "route": "/admin/plugins/github_backup" +} diff --git a/CTFd_github_backup/config.py b/CTFd_github_backup/config.py new file mode 100644 index 0000000..89d3055 --- /dev/null +++ b/CTFd_github_backup/config.py @@ -0,0 +1,5 @@ +config = { + "GITHUB_APP_ID": 0000000, + "GITHUB_APP_INSTALLATION_URL": "https://github.com/apps/github-app-name/installations/new", + "GITHUB_APP_PRIVATE_KEY": 
"-----BEGIN RSA PRIVATE KEY-----\nexample\n-----END RSA PRIVATE KEY-----" +} \ No newline at end of file diff --git a/CTFd_github_backup/expot_data.py b/CTFd_github_backup/expot_data.py new file mode 100644 index 0000000..6e9a77b --- /dev/null +++ b/CTFd_github_backup/expot_data.py @@ -0,0 +1,87 @@ +from CTFd.plugins.github_backup.models import GithubChallengeSync, GithubFlagSync, GithubHintSync +from CTFd.models import Tags, Flags, Hints, Challenges +from CTFd.plugins.github_backup.utils import generate_uuid + + +def is_imported_from_github(challenge_id: int) -> bool: + """ + Determines if a challenge is imported from GitHub. + """ + challenge_sync = GithubChallengeSync.query.filter_by(challenge_id=challenge_id).first() + return bool(challenge_sync) + + +def prepare_json(challenge_id: int) -> tuple[dict, str]: + """ + Generates a JSON-like structure and corresponding name for a given challenge. + """ + challenge = Challenges.query.filter_by(id=challenge_id).first() + if not challenge: + raise ValueError("Challenge not found") + + challenge_sync = GithubChallengeSync.query.filter_by(challenge_id=challenge.id).first() + + if not challenge_sync: + chellenge_uuid = generate_uuid() + else: + chellenge_uuid = challenge_sync.challenge_uuid + + # Prepare challenge data + data = { + "challenge": { + "uuid": chellenge_uuid, + "name": challenge.name, + "description": challenge.description, + "attribution": challenge.attribution, + "connection_info": challenge.connection_info, + "max_attempts": challenge.max_attempts, + "value": challenge.value, + "category": challenge.category, + "type": challenge.type, + "state": challenge.state, + "flags": [], + "tags": [], + "hints": [], + } + } + + # Prepare flags data + flags = Flags.query.filter_by(challenge_id=challenge.id).all() + for f in flags: + flag_sync = GithubFlagSync.query.filter_by(flag_id=f.id).first() + + if not flag_sync: + flag_uuid = generate_uuid() + else: + flag_uuid = flag_sync.flag_uuid + + 
data["challenge"]["flags"].append({ + "uuid": flag_uuid, + "type": f.type, + "content": f.content, + "data": f.data + }) + + # Prepare tags data + tags = Tags.query.filter_by(challenge_id=challenge.id).all() + data["challenge"]["tags"] = [t.value for t in tags] + + # Prepare hints data + hints = Hints.query.filter_by(challenge_id=challenge.id).all() + for h in hints: + hint_sync = GithubHintSync.query.filter_by(hint_id=h.id).first() + + if not hint_sync: + hint_uuid = generate_uuid() + else: + hint_uuid = hint_sync.hint_uuid + + data["challenge"]["hints"].append({ + "uuid": hint_uuid, + "title": h.content, + "type": h.type, + "content": h.content, + "cost": h.cost + }) + + return data, challenge.name diff --git a/CTFd_github_backup/import_data.py b/CTFd_github_backup/import_data.py new file mode 100644 index 0000000..aa86f0a --- /dev/null +++ b/CTFd_github_backup/import_data.py @@ -0,0 +1,351 @@ +from CTFd.models import Tags, Flags, Hints, Challenges +from CTFd.plugins.github_backup.validate_data import validate_tags_data, validate_flag_data, validate_hints_data, validate_challenge_data +from CTFd.plugins.github_backup.models import db, GithubChallengeSync, GithubFlagSync, GithubHintSync +from datetime import datetime +import json +import requests + +def import_tags(challenge, tags_data, path, overwrite_existing): + """ + Imports tags into the database for a given challenge. + """ + validate_tags_data(tags_data, path) + + if overwrite_existing: + Tags.query.filter_by(challenge_id=challenge.id).delete() + + for tag in tags_data: + tag_entry = Tags(challenge_id=challenge.id, value=tag) + db.session.add(tag_entry) + + +def import_flags(challenge_id, flags, repo_id, challenge_uuid, path, overwrite_existing): + """ + Imports flags for a specific challenge and synchronizes them with a GitHub repository. This process allows adding, + updating, and deleting flags based on their presence within the provided input data. 
+ """ + json_flag_uuids = set() + now = datetime.utcnow() + + for flag in flags: + try: + validate_flag_data(flag, path) + except ValueError as ve: + raise ValueError(str(ve)) + + flag_uuid = flag["uuid"] + json_flag_uuids.add(flag_uuid) + + existing_flag_sync = GithubFlagSync.query.filter_by(flag_uuid=flag_uuid).first() + + if existing_flag_sync: + if overwrite_existing: + existing_flag = Flags.query.get(existing_flag_sync.flag_id) + if existing_flag: + existing_flag.type = flag["type"] + existing_flag.content = flag["content"] + existing_flag.data = flag.get("data", "") + existing_flag_sync.last_updated_at = now + else: + new_flag = Flags( + challenge_id=challenge_id, + type=flag["type"], + content=flag["content"], + data=flag.get("data", "") + ) + db.session.add(new_flag) + db.session.flush() + + db.session.add(GithubFlagSync( + flag_id=new_flag.id, + github_repo_id=repo_id, + challenge_uuid=challenge_uuid, + flag_uuid=flag_uuid, + last_updated_at=now + )) + + # Delete flags that no longer exist in the JSON + synced_flags = GithubFlagSync.query.filter_by( + github_repo_id=repo_id, + challenge_uuid=challenge_uuid + ).all() + + for synced in synced_flags: + if synced.flag_uuid not in json_flag_uuids: + flag = Flags.query.get(synced.flag_id) + if flag: + db.session.delete(flag) + db.session.delete(synced) + + +def import_hints(*, challenge_id, hints, repo_id, challenge_uuid, path, overwrite_existing=True): + """ + Imports hints into the system, either creating new hints or updating existing ones. This function manages + the synchronization of hints by using their unique identifiers (UUIDs). If a hint with the same UUID + already exists, it may be updated depending on the overwrite_existing parameter. 
+ """ + validate_hints_data(hints, path) + + for hint_data in hints: + uuid = hint_data.get("uuid") + if not uuid: + raise ValueError("One of the hints is missing the 'uuid' field.") + + existing_sync = GithubHintSync.query.filter_by(hint_uuid=uuid).first() + + if existing_sync: + if overwrite_existing: + hint = Hints.query.get(existing_sync.hint_id) + if hint: + hint.title = hint_data.get("title", "") + hint.content = hint_data.get("content", "") + hint.cost = hint_data.get("cost", 0) + hint.type = hint_data.get("type", "standard") + existing_sync.last_updated_at = datetime.utcnow() + continue + else: + continue + + hint = Hints( + challenge_id=challenge_id, + title=hint_data.get("title", ""), + content=hint_data.get("content", ""), + cost=hint_data.get("cost", 0), + type=hint_data.get("type", "standard") + ) + db.session.add(hint) + db.session.flush() + + db.session.add(GithubHintSync( + hint_id=hint.id, + github_repo_id=repo_id, + hint_uuid=uuid, + challenge_uuid=challenge_uuid, + hint_path=path, + last_updated_at=datetime.utcnow() + )) + + +def import_or_update_challenge(challenge_info, repo, path, overwrite_existing): + """ + Handles the import or update of a challenge within a system by validating + provided data, checking for existing synchronization records, and either + updating or creating new database entries based on the operations performed. + Performs optional overwriting of existing data when specified. 
+ """ + uuid = challenge_info.get("uuid") + if not uuid: + return None, False, "Missing 'uuid' field" + + try: + validated_data = validate_challenge_data(challenge_info, path) + except ValueError as ve: + return None, False, str(ve) + + existing_sync = GithubChallengeSync.query.filter_by(challenge_uuid=uuid).first() + + if existing_sync: + if overwrite_existing: + challenge = Challenges.query.get(existing_sync.challenge_id) + if challenge: + challenge.name = validated_data["name"] + challenge.description = validated_data["description"] + challenge.category = validated_data["category"] + challenge.value = validated_data["value"] + challenge.state = validated_data["state"] + challenge.type = validated_data["type"] + challenge.connection_info = validated_data.get("conection_info") + challenge.max_attempts = validated_data.get("max_attemps", 0) + challenge.attribution = validated_data.get("attribution") + existing_sync.last_updated_at = datetime.utcnow() + return challenge, False, None + else: + return None, False, "Synchronized challenge not found in the database" + else: + return None, False, "Challenge already synchronized (no overwrite)" + else: + challenge = Challenges( + name=validated_data["name"], + description=validated_data["description"], + category=validated_data["category"], + value=validated_data["value"], + state=validated_data["state"], + type=validated_data["type"], + connection_info=validated_data.get("conection_info"), + max_attempts=validated_data.get("max_attemps", 0), + attribution=validated_data.get("attribution") + ) + db.session.add(challenge) + db.session.flush() + + db.session.add(GithubChallengeSync( + challenge_id=challenge.id, + github_repo_id=repo.id, + challenge_uuid=uuid, + challenge_path=path, + last_updated_at=datetime.utcnow() + )) + + return challenge, True, None + + +def remove_orphaned_challenges(repo, processed_paths, delete_mode="sync_only"): + """ + Removes orphaned challenges associated with a given repository. 
def remove_orphaned_challenges(repo, processed_paths, delete_mode="sync_only"):
    """Remove sync records (and optionally challenges) gone from the repo.

    A synced challenge is orphaned when its stored ``challenge_path`` is
    not in ``processed_paths``. With ``delete_mode="sync_only"`` only the
    sync row is removed (the challenge survives as a non-GitHub challenge);
    with ``"full"`` the challenge itself is deleted too. Returns
    ``(count_removed, errors)``.
    """
    errors = []
    count_removed = 0

    synced_challenges = GithubChallengeSync.query.filter_by(github_repo_id=repo.id).all()
    # Orphans: synced paths that were not seen during this import run.
    orphaned = [sc for sc in synced_challenges if sc.challenge_path not in processed_paths]

    for sc in orphaned:
        try:
            if delete_mode == "sync_only":
                db.session.delete(sc)
                count_removed += 1
            elif delete_mode == "full":
                challenge = Challenges.query.get(sc.challenge_id)
                if challenge:
                    db.session.delete(challenge)
                db.session.delete(sc)
                count_removed += 1
        except Exception as e:
            errors.append({"file": sc.challenge_path, "error": f"Error deleting: {str(e)}"})

    return count_removed, errors
+ """ + headers = { + "Authorization": f"token {access_token}", + "Accept": "application/vnd.github+json" + } + + base_url = f"https://api.github.com/repos/{repo.full_name}/contents/challenges" + file_list_resp = requests.get(base_url, headers=headers) + + print(file_list_resp) + + if file_list_resp.status_code != 200: + return {"success": False, "message": "Could not access /challenges in the repository."} + + file_list = file_list_resp.json() + count_created = 0 + count_updated = 0 + count_skipped = 0 + errors = [] + + processed_paths = set() + + for file in file_list: + if not file["name"].endswith(".json"): + continue + + path = file["path"] + + file_resp = requests.get(file["download_url"], headers=headers) + if file_resp.status_code != 200: + errors.append({"file": path, "error": f"HTTP {file_resp.status_code}"}) + continue + + try: + challenge_data = json.loads(file_resp.text) + challenge_info = challenge_data.get("challenge", {}) + + challenge, created, error_msg = import_or_update_challenge(challenge_info, repo, path, overwrite_existing) + + if error_msg: + processed_paths.add(path) + + if error_msg != "Challenge already synchronized (no overwrite)": + errors.append({"file": path, "error": error_msg}) + else: + count_skipped += 1 + continue + + processed_paths.add(path) + + if challenge.type == "standard": + if "dynamic" in challenge_info: + errors.append({"file": path, "error": "Standard challenges cannot have dynamic data."}) + continue + + # Flags + try: + import_flags( + challenge_id=challenge.id, + flags=challenge_info.get("flags", []), + repo_id=repo.id, + challenge_uuid=challenge_info["uuid"], + path=path, + overwrite_existing=overwrite_existing + ) + except ValueError as ve: + errors.append({"file": path, "error": str(ve)}) + continue + + # + try: + import_hints( + challenge_id=challenge.id, + hints=challenge_info.get("hints", []), + repo_id=repo.id, + challenge_uuid=challenge_info["uuid"], + path=path, + overwrite_existing=overwrite_existing + ) + 
except ValueError as ve: + errors.append({"file": path, "error": str(ve)}) + continue + + # Import tags + tags_data = challenge_info.get("tags", []) + if tags_data: + try: + import_tags(challenge, tags_data, path, overwrite_existing) + except ValueError as ve: + errors.append({"file": path, "error": str(ve)}) + continue + + else: + continue + + if created: + count_created += 1 + else: + count_updated += 1 + + except Exception as e: + errors.append({"file": path, "error": str(e)}) + continue + + count_removed, orphan_errors = remove_orphaned_challenges(repo, processed_paths, delete_mode) + errors.extend(orphan_errors) + + repo.last_synced_at = datetime.utcnow() + db.session.commit() + + return { + "success": True, + "created": count_created, + "updated": count_updated, + "skipped": count_skipped, + "removed": count_removed, + "errors": errors + } \ No newline at end of file diff --git a/CTFd_github_backup/models/__init__.py b/CTFd_github_backup/models/__init__.py new file mode 100644 index 0000000..73caa56 --- /dev/null +++ b/CTFd_github_backup/models/__init__.py @@ -0,0 +1,63 @@ +from CTFd.models import db + +class UserGitHubToken(db.Model): + __tablename__ = "user_github_tokens" + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey("users.id"), unique=True, nullable=False) + token = db.Column(db.String(255), nullable=False) + + user = db.relationship("Users", backref="github_token_entry", uselist=False) + + +class GithubRepositories(db.Model): + __tablename__ = "github_repositories" + + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) + github_repo_id = db.Column(db.Integer, nullable=False) + name = db.Column(db.String(255), nullable=False) + full_name = db.Column(db.String(255), nullable=False) + selected = db.Column(db.Boolean, default=False) + last_synced_at = db.Column(db.DateTime, nullable=True) + + user = db.relationship("Users", 
backref="github_repositories") + + +class GithubChallengeSync(db.Model): + __tablename__ = "github_challenge_sync" + + id = db.Column(db.Integer, primary_key=True) + challenge_id = db.Column(db.Integer, db.ForeignKey("challenges.id", ondelete="CASCADE")) + github_repo_id = db.Column(db.Integer, db.ForeignKey("github_repositories.id", ondelete="CASCADE")) + challenge_uuid = db.Column(db.String(64), nullable=False, unique=True) + challenge_path = db.Column(db.String(255), nullable=True) + last_updated_at = db.Column(db.DateTime, nullable=True) + + challenge = db.relationship("Challenges", backref="github_sync") + repo = db.relationship("GithubRepositories", backref="synced_challenges") + + +class GithubFlagSync(db.Model): + __tablename__ = "github_flag_sync" + + id = db.Column(db.Integer, primary_key=True) + flag_id = db.Column(db.Integer, db.ForeignKey("flags.id", ondelete="CASCADE")) + github_repo_id = db.Column(db.Integer, db.ForeignKey("github_repositories.id", ondelete="CASCADE")) + challenge_uuid = db.Column(db.String(64), nullable=False) + flag_uuid = db.Column(db.String(64), nullable=False) + last_updated_at = db.Column(db.DateTime, nullable=True) + + flag = db.relationship("Flags", backref="github_sync") + repo = db.relationship("GithubRepositories", backref="synced_flags") + + +class GithubHintSync(db.Model): + __tablename__ = "github_hint_sync" + + id = db.Column(db.Integer, primary_key=True) + hint_id = db.Column(db.Integer, db.ForeignKey("hints.id", ondelete="CASCADE")) + github_repo_id = db.Column(db.Integer, db.ForeignKey("github_repositories.id", ondelete="CASCADE")) + hint_uuid = db.Column(db.String(128), nullable=False, unique=True) + challenge_uuid = db.Column(db.String(128), nullable=False) + hint_path = db.Column(db.String(512), nullable=True) + last_updated_at = db.Column(db.DateTime, nullable=True) \ No newline at end of file diff --git a/CTFd_github_backup/models/__pycache__/__init__.cpython-311.pyc 
b/CTFd_github_backup/models/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..af2a8aa Binary files /dev/null and b/CTFd_github_backup/models/__pycache__/__init__.cpython-311.pyc differ diff --git a/CTFd_github_backup/static/css/style.css b/CTFd_github_backup/static/css/style.css new file mode 100644 index 0000000..fc3712f --- /dev/null +++ b/CTFd_github_backup/static/css/style.css @@ -0,0 +1,92 @@ +.header { + grid-area: header; + text-align: center; + padding: 60px 10px; +} + +/* COMMON CLASSES */ +.import__header, .export__header { + border-bottom: 2px solid #da8b00; + margin-bottom: 32px; + font-weight: bolder; +} + +.subsection-title { + font-weight: bold; + padding-bottom: 4px; +} + +.a-button { + width: 100%; + margin-top: 16px; +} + +.pagination { + display: flex; + justify-content: center; + align-items: center; +} + +.multiple-import, .multiple-export { + display: flex; + justify-content: flex-end; + padding-bottom: 8px; +} + +/* IMPORT SECTION */ +.import { + padding-bottom: 48px; +} + +.import__body { + display: grid; + grid-template-columns: 30% 65%; + grid-template-areas: + "aside main"; + gap: 48px; +} + +.body-import__aside { + grid-area: aside; +} + +.body-import__main { + grid-area: main; + padding-top: 0; +} + +.aside-import__error { + margin: 16px 0; + display: none; +} + +.aside-import__repositories { + display: none; +} + +.repositories__button { + width: 100%; +} + +.aside-import__info { + border: 4px solid #da8b00; + margin-top: 32px; + padding: 12px; + border-radius: 8px; +} + +.aside-import__info .info__title { + color: #da8b00; + border-bottom: 1px solid #da8b00; +} + +.info__button { + width: 100%; +} + +.main-import__settings { + margin-bottom: 24px; +} + +/* EXPORT SECTION */ + diff --git a/CTFd_github_backup/static/js/auth_section.js b/CTFd_github_backup/static/js/auth_section.js new file mode 100644 index 0000000..3fe5720 --- /dev/null +++ b/CTFd_github_backup/static/js/auth_section.js @@ -0,0 +1,24 @@ +const 
// Button that finishes the GitHub App installation by linking it to CTFd.
// NOTE(review): the element id spelling ("instalation") must match the template.
const buttonLinkInstallation = document.querySelector("#button-link-instalation");

// Complete the installation: ask the backend to fetch and store the installation id.
buttonLinkInstallation?.addEventListener("click", () => {
    fetch("/plugins/github_backup/installations", {
        method: "GET",
        credentials: "same-origin",
        headers: {
            "CSRF-Token": CTFd.config.csrfNonce
        }
    })
        .then((response) => response.json())
        .then((data) => {
            if (data.success) {
                // alert (not confirm): this is a notification, there is
                // nothing to cancel and the return value was ignored.
                alert("Linked successfully. " + data.message);
                location.reload();
            } else {
                alert("Error: " + (data.message || "Unexpected error"));
            }
        })
        .catch((err) => {
            alert("Unexpected Error: " + err.message);
        });
});
/**
 * Fetch all challenges from the backend and repopulate the challenge table.
 * Renders a muted placeholder row when the list comes back empty.
 */
function loadAllChallenges() {
    const body = document.querySelector("#challenge-table tbody");
    body.innerHTML = "";

    fetch("/plugins/github_backup/challenges", {
        method: "GET",
        credentials: "same-origin"
    })
        .then((resp) => resp.json())
        .then((payload) => {
            if (!payload.success) {
                throw new Error(payload.message || "Could not load the challenges.");
            }

            challenges = payload.challenges;

            if (!challenges.length) {
                const cell = document.createElement("td");
                cell.setAttribute("colspan", "4");
                cell.classList.add("text-center", "text-muted");
                cell.textContent = "No challenges found.";

                const row = document.createElement("tr");
                row.appendChild(cell);
                body.appendChild(row);
                return;
            }

            renderChallenges();
            renderChallengePagination();
        })
        .catch((err) => {
            alert("Unexpected Error: " + err.message);
        });
}
+ */ +function renderChallenges() { + const tableBody = document.querySelector("#challenge-table tbody"); + tableBody.innerHTML = ""; + + const start = (currentChallengePage - 1) * challengesPerPage; + const end = start + challengesPerPage; + const pageChallenges = challenges.slice(start, end); + + pageChallenges.forEach((challenge) => { + const tr = document.createElement("tr"); + + const checkboxTd = document.createElement("td"); + const checkbox = document.createElement("input"); + checkbox.type = "checkbox"; + checkbox.className = "export-checkbox"; + checkbox.setAttribute('data-id', challenge.id); + checkbox.value = challenge.id; + + if (selectedChallenges.has(challenge.id)) { + checkbox.checked = true; + } + + checkbox.addEventListener("change", () => { + if (checkbox.checked) { + selectedChallenges.add(challenge.id); + } else { + selectedChallenges.delete(challenge.id); + } + }); + + checkboxTd.appendChild(checkbox); + + const nameTd = document.createElement("td"); + nameTd.textContent = challenge.name; + + const importedGithubTd = document.createElement("td"); + importedGithubTd.textContent = challenge.imported ? 
"Yes" : "No"; + + const actionsTd = document.createElement("td"); + actionsTd.innerHTML = ` + + `; + + tr.appendChild(checkboxTd); + tr.appendChild(nameTd); + tr.appendChild(importedGithubTd); + tr.appendChild(actionsTd); + tableBody.appendChild(tr); + }); + + + document.querySelectorAll(".export-challenge-btn").forEach((btn) => { + btn.addEventListener("click", () => { + const challengeId = btn.getAttribute("data-id"); + const row = btn.closest("tr"); + const imported = row.querySelector("td:nth-child(3)").textContent === "Yes"; + + let message = "Do you want to export the challenge?"; + if (!imported) { + message += "\n\n⚠️ This challenge was not imported from GitHub.\n" + + "UUID fields will be generated automatically during export."; + } + + if (confirm(message)) { + fetch(`/plugins/github_backup/challenge/${challengeId}/download`, { + method: "GET", + credentials: "same-origin" + }) + .then((response) => { + if (!response.ok) throw new Error("Error during export"); + + const disposition = response.headers.get("Content-Disposition"); + let filename = `challenge_${challengeId}.json`; + if (disposition && disposition.includes("filename=")) { + filename = disposition.split("filename=")[1].replace(/"/g, ""); + } + + return response.blob().then((blob) => ({ blob, filename })); + }) + .then(({blob, filename}) => { + const url = window.URL.createObjectURL(blob); + const a = document.createElement("a"); + a.href = url; + a.download = filename; + document.body.appendChild(a); + a.click(); + a.remove(); + window.URL.revokeObjectURL(url); + }) + .catch((err) => { + alert("Unexpected Error: " + err.message); + }); + } + }); + }); +} + +/** + * Updates and renders the pagination controls for the challenge list. 
/**
 * Updates and renders the pagination controls for the challenge list.
 * Rebuilds #challenge-pagination from the module-level `challenges`,
 * `currentChallengePage` and `challengesPerPage` state.
 */
function renderChallengePagination() {
    const challengePagination = document.querySelector("#challenge-pagination");
    challengePagination.innerHTML = "";

    const totalPages = Math.ceil(challenges.length / challengesPerPage);

    // "Previous" control; disabled on the first page.
    const prevLi = document.createElement("li");
    prevLi.className = `page-item ${currentChallengePage === 1 ? "disabled" : ""}`;
    // NOTE(review): the markup inside this template literal appears truncated
    // in this view (a page-link anchor around "Previous" is expected) —
    // confirm against the original file.
    prevLi.innerHTML = `Previous`;
    prevLi.addEventListener("click", (e) => {
        e.preventDefault();
        if (currentChallengePage > 1) {
            currentChallengePage--;
            renderChallenges();
            renderChallengePagination();
        }
    });
    challengePagination.appendChild(prevLi);

    // Non-clickable "current / total" indicator.
    const pageInfo = document.createElement("li");
    pageInfo.className = "page-item disabled";
    // NOTE(review): same truncation caveat as above for this literal.
    pageInfo.innerHTML = `${currentChallengePage} / ${totalPages}`;
    challengePagination.appendChild(pageInfo);

    // "Next" control; disabled on the last page.
    const nextLi = document.createElement("li");
    nextLi.className = `page-item ${currentChallengePage === totalPages ? "disabled" : ""}`;
    // NOTE(review): same truncation caveat as above for this literal.
    nextLi.innerHTML = `Next`;
    nextLi.addEventListener("click", (e) => {
        e.preventDefault();
        if (currentChallengePage < totalPages) {
            currentChallengePage++;
            renderChallenges();
            renderChallengePagination();
        }
    });
    challengePagination.appendChild(nextLi);
}

// Initial population of the challenge table on page load.
loadAllChallenges();
= document.createElement("a"); + a.href = url; + a.download = "challenges_export.zip"; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + window.URL.revokeObjectURL(url); + + // Reset checkboxes + selectedChallenges.clear(); + + } catch (err) { + console.error("Error:", err); + alert("There was a problem exporting the challenges..."); + } +}); + +// Select all +document.getElementById("select-all-challenges").addEventListener("click", () => { + challenges.forEach(ch => selectedChallenges.add(ch.id)); + renderChallenges(); +}); + +// Unselect all +document.getElementById("deselect-all-challenges").addEventListener("click", () => { + selectedChallenges.clear(); + renderChallenges(); +}); \ No newline at end of file diff --git a/CTFd_github_backup/static/js/import_section.js b/CTFd_github_backup/static/js/import_section.js new file mode 100644 index 0000000..8136887 --- /dev/null +++ b/CTFd_github_backup/static/js/import_section.js @@ -0,0 +1,477 @@ +let isImportingRepos = false; + +let allRepos = []; +let currentPage = 1; +const ITEMS_PER_PAGE = 10; +let selectedRepos = new Set(); + +// Paginación de repos guardados +let savedRepos = []; +let savedReposPage = 1; +const SAVED_ITEMS_PER_PAGE = 4; +let selectedSavedRepos = new Set(); + + +const githubRepoSearch = document.querySelector("#github-repo-search"); +const githubReposList = document.querySelector("#github-repos-list"); +const githubRepoPagination = document.querySelector("#github-repo-pagination"); +const githubReposSection = document.querySelector("#github-repos-section"); +const githubLoginSection = document.querySelector("#github-login-section"); +const buttonSaveSelectedRepos = document.querySelector("#save-selected-repos"); + +githubRepoSearch?.addEventListener("input", () => { + currentPage = 1; + renderRepos(); +}); + + +/** + * Renders a list of repositories based on the current search term, pagination state, + * and the selected repositories. 
Filters the repositories by the search term, paginates + * the results, and populates the repository list into the DOM. + */ +function renderRepos() { + const searchTerm = githubRepoSearch.value.toLowerCase(); + const filteredRepos = allRepos.filter(repo => + repo.full_name.toLowerCase().includes(searchTerm) + ); + const totalPages = Math.ceil(filteredRepos.length / ITEMS_PER_PAGE); + const start = (currentPage - 1) * ITEMS_PER_PAGE; + const end = start + ITEMS_PER_PAGE; + const reposToShow = filteredRepos.slice(start, end); + + githubReposList.innerHTML = ""; + reposToShow.forEach((repo, index) => { + const id = `repo-${start + index}`; + const div = document.createElement("div"); + div.className = "form-check"; + div.innerHTML = ` + + + `; + githubReposList.appendChild(div); + + div.querySelector("input").addEventListener("change", (e) => { + if (e.target.checked) { + selectedRepos.add(repo.full_name); + } else { + selectedRepos.delete(repo.full_name) + } + }) + }); + + renderPagination(totalPages); +} + + +/** + * Renders the pagination control for navigating through pages. + */ +function renderPagination(totalPages) { + githubRepoPagination.innerHTML = ""; + + // Botón Anterior + const prevLi = document.createElement("li"); + prevLi.className = `page-item ${currentPage === 1 ? "disabled" : ""}`; + prevLi.innerHTML = `Previous`; + prevLi.addEventListener("click", (e) => { + e.preventDefault(); + if (currentPage > 1) { + currentPage--; + renderRepos(); + } + }); + githubRepoPagination.appendChild(prevLi); + + // Texto página actual / total + const pageInfo = document.createElement("li"); + pageInfo.className = "page-item disabled"; + pageInfo.innerHTML = `${currentPage} / ${totalPages}`; + githubRepoPagination.appendChild(pageInfo); + + // Botón Siguiente + const nextLi = document.createElement("li"); + nextLi.className = `page-item ${currentPage === totalPages ? 
"disabled" : ""}`; + nextLi.innerHTML = `Next`; + nextLi.addEventListener("click", (e) => { + e.preventDefault(); + if (currentPage < totalPages) { + currentPage++; + renderRepos(); + } + }); + githubRepoPagination.appendChild(nextLi); +} + + +/* Fetch repos from the server and render them */ +fetch("/plugins/github_backup/repos") + .then(response => { + if (!response.ok) { + if (response.status === 401) { + throw new Error("User not authenticated with GitHub. Please install the app or make sure you have the necessary permissions."); + } + if (response.status === 500) { + throw new Error("An unexpected error ocurred while attempting to connect to GitHub. Please check the GitHubApp settings and your Internet connection."); + } + return response.json().then(err => { + throw new Error(err.message || "Unexpected error communicating with the API."); + }); + } + return response.json(); + }) + .then(data => { + if (!data.success) { + throw new Error(data.message || "The API did not respond successfully."); + } + + allRepos = data.repos || []; + + if (githubLoginSection) githubLoginSection.style.display = "none"; + if (githubReposSection) githubReposSection.style.display = "block"; + const errorSection = document.getElementById("github-error-section"); + if (errorSection) errorSection.style.display = "none"; + + renderRepos(); + loadSavedRepos(); + }) + .catch(error => { + console.error("Error fetching the repos: ", error); + const errorSection = document.querySelector("#github-error-section"); + const errorMessage = document.querySelector("#github-error-message"); + if (errorMessage) errorMessage.textContent = error.message; + if (errorSection) errorSection.style.display = "block"; + if (githubLoginSection) githubLoginSection.style.display = "block"; + if (githubReposSection) githubReposSection.style.display = "none"; + }); + + +/* Save selected repos */ +buttonSaveSelectedRepos?.addEventListener("click", () => { + const selected = Array.from(selectedRepos).map(name => { 
+ return allRepos.find(r => r.full_name === name); + }).filter(Boolean); + + if (selected.length === 0) { + alert("No repositories selected"); + return; + } + + fetch("/plugins/github_backup/repos/selection", { + method: "POST", + credentials: "same-origin", + headers: { + "Content-Type": "application/json", + "CSRF-Token": CTFd.config.csrfNonce + }, + body: JSON.stringify({ repos: selected }) + }) + .then(response => response.json()) + .then(data => { + if (data.success) { + alert("Repositories saved successfully."); + loadSavedRepos(); + } else { + alert("Error saving: " + data.message || "Unexpected error."); + } + }) + .catch(err => { + alert("Unexpected Error: " + err.message); + }); +}); + + +/** + * Loads the saved repositories and updates the UI with the list of repositories retrieved. + */ +function loadSavedRepos() { + const tableBody = document.querySelector("#github-saved-repos-table tbody"); + const paginationContainer = document.querySelector("#github-saved-repos-pagination"); + + tableBody.innerHTML = ` + + + +
Loading saved repositories...
+ + + `; + + fetch("/plugins/github_backup/repos/saved", { + method: "GET", + credentials: "same-origin" + }) + .then(r => r.json()) + .then(data => { + tableBody.innerHTML = ""; + + if (!data.success) { + tableBody.innerHTML = `ERROR: ${data.message || "Could not load the repositories."}`; + return; + } + + const previousSelection = new Set(selectedSavedRepos); + savedRepos = data.repos || []; + savedReposPage = 1; + selectedSavedRepos = new Set([...previousSelection].filter(id => savedRepos.some(r => String(r.id) === id))); + + renderSavedRepos(); + }) + .catch(err => { + tableBody.innerHTML = `Error: ${err.message}`; + }); +} + +function renderSavedRepos() { + const tableBody = document.querySelector("#github-saved-repos-table tbody"); + const paginationContainer = document.querySelector("#github-saved-repos-pagination"); + + tableBody.innerHTML = ""; + + if (savedRepos.length === 0) { + tableBody.innerHTML = `No saved repositories.`; + return; + } + + const totalPages = Math.ceil(savedRepos.length / SAVED_ITEMS_PER_PAGE); + const start = (savedReposPage - 1) * SAVED_ITEMS_PER_PAGE; + const end = start + SAVED_ITEMS_PER_PAGE; + const reposToShow = savedRepos.slice(start, end); + + // Renderizamos filas + reposToShow.forEach(repo => { + const tr = document.createElement("tr"); + + const checkedAttr = selectedSavedRepos.has(String(repo.id)) ? "checked" : ""; + + tr.innerHTML = ` + + + + ${repo.full_name} + ${repo.last_synced_at ? 
repo.last_synced_at + " UTC" : "-"} + + + + + `; + tableBody.appendChild(tr); + }); + + // ✅ Aquí está la clave: + // Escuchar cambios de checkbox y actualizar el Set global + tableBody.querySelectorAll(".sync-checkbox").forEach(cb => { + cb.addEventListener("change", e => { + const id = String(e.target.value); + if (e.target.checked) { + selectedSavedRepos.add(id); + } else { + selectedSavedRepos.delete(id); + } + }); + + // Aseguramos que el estado visual sea correcto incluso si se volvió a renderizar + if (selectedSavedRepos.has(String(cb.value))) { + cb.checked = true; + } + }); + + // Paginación + renderSavedReposPagination(totalPages); + + // Reasignar los listeners de botones + attachRepoEventListeners(); +} + +function renderSavedReposPagination(totalPages) { + const pagination = document.querySelector("#github-saved-repos-pagination"); + pagination.innerHTML = ""; + + const prevLi = document.createElement("li"); + prevLi.className = `page-item ${savedReposPage === 1 ? "disabled" : ""}`; + prevLi.innerHTML = `Previous`; + prevLi.addEventListener("click", e => { + e.preventDefault(); + if (savedReposPage > 1) { + savedReposPage--; + renderSavedRepos(); + } + }); + pagination.appendChild(prevLi); + + const infoLi = document.createElement("li"); + infoLi.className = "page-item disabled"; + infoLi.innerHTML = `${savedReposPage} / ${totalPages}`; + pagination.appendChild(infoLi); + + const nextLi = document.createElement("li"); + nextLi.className = `page-item ${savedReposPage === totalPages ? "disabled" : ""}`; + nextLi.innerHTML = `Next`; + nextLi.addEventListener("click", e => { + e.preventDefault(); + if (savedReposPage < totalPages) { + savedReposPage++; + renderSavedRepos(); + } + }); + pagination.appendChild(nextLi); +} + + +/** + * Attaches event listeners to various elements associated with repository management, + * such as buttons for deleting repositories, syncing repositories, and importing multiple repositories. 
+ */ +function attachRepoEventListeners() { + document.querySelectorAll(".delete-repo-btn").forEach(btn => { + btn.onclick = () => { + const repoId = btn.dataset.id; + if (!confirm("Are you sure you want to delete this repository?")) return; + + fetch(`/plugins/github_backup/repos/${repoId}`, { + method: "DELETE", + credentials: "same-origin", + headers: { "Content-Type": "application/json", "CSRF-Token": CTFd.config.csrfNonce } + }) + .then(r => r.json()) + .then(resp => { + if (resp.success) { + alert("Deleted: " + resp.message); + loadSavedRepos(); + loadAllChallenges(); + } else { + alert("Error: " + resp.message); + } + }); + }; + }); + + document.querySelectorAll(".sync-now-btn").forEach(btn => { + btn.onclick = async () => { + if (isImportingRepos) return; + isImportingRepos = true; + + const repoId = btn.dataset.id; + const deleteModeChecked = document.querySelector('input[name="delete-mode"]:checked'); + const deleteModeValue = deleteModeChecked ? deleteModeChecked.value : false; + + if (!confirm("Are you sure you want to import the challenges from this repository?")) { + isImportingRepos = false; + return; + } + + const row = btn.closest("tr"); + const syncCell = row.querySelector("td:nth-child(3)"); + const deleteBtn = row.querySelector(".delete-repo-btn"); + const originalSyncContent = syncCell.innerHTML; + + // Spinner + syncCell.innerHTML = `
Importing...
`; + btn.disabled = true; + deleteBtn.disabled = true; + + try { + const resp = await fetch(`/plugins/github_backup/repos/${repoId}/import`, { + method: "POST", + credentials: "same-origin", + headers: { "Content-Type": "application/json", "CSRF-Token": CTFd.config.csrfNonce }, + body: JSON.stringify({ delete_mode: deleteModeValue }) + }).then(r => r.json()); + + if (!resp.success) throw new Error(resp.message || "Import failed"); + + let message = resp.message; + if (resp.errors && resp.errors.length > 0) { + message += "\nErrors:\n" + resp.errors.map(e => `- ${e.file}: ${e.error}`).join("\n"); + } + alert("Import complete:\n" + message); + + loadSavedRepos(); + loadAllChallenges(); + } catch (err) { + alert("Unexpected Error: Check if the repository follows the expected format. " + err.message); + syncCell.innerHTML = originalSyncContent; + } finally { + btn.disabled = false; + deleteBtn.disabled = false; + syncCell.innerHTML = originalSyncContent; + isImportingRepos = false; + } + }; + }); + + + const importSelectedBtn = document.querySelector("#import-selected-repos"); + if (importSelectedBtn) { + importSelectedBtn.onclick = async () => { + if (isImportingRepos) return; + const checkboxes = document.querySelectorAll(".sync-checkbox:checked"); + if (checkboxes.length === 0) { + alert("No repositories selected.\nSelect at least one repository to import."); + return; + } + if (!confirm(`You will import ${checkboxes.length} repositories. Continue?`)) return; + + isImportingRepos = true; + const deleteModeChecked = document.querySelector('input[name="delete-mode"]:checked'); + const deleteModeValue = deleteModeChecked ? 
deleteModeChecked.value : false; + + for (const cb of checkboxes) { + const repoId = cb.value; + const row = cb.closest("tr"); + const syncCell = row.querySelector("td:nth-child(3)"); + const deleteBtn = row.querySelector(".delete-repo-btn"); + const btn = row.querySelector(".sync-now-btn"); + const originalSyncContent = syncCell.innerHTML; + + // Spinner + syncCell.innerHTML = `
Importing...
`; + btn.disabled = true; + deleteBtn.disabled = true; + + try { + const resp = await fetch(`/plugins/github_backup/repos/${repoId}/import`, { + method: "POST", + credentials: "same-origin", + headers: { "Content-Type": "application/json", "CSRF-Token": CTFd.config.csrfNonce }, + body: JSON.stringify({ delete_mode: deleteModeValue }) + }).then(r => r.json()); + + if (!resp.success) throw new Error(resp.message || "Import failed"); + + let message = resp.message; + if (resp.errors && resp.errors.length > 0) { + message += "\nErrors:\n" + resp.errors.map(e => `- ${e.file}: ${e.error}`).join("\n"); + } + alert(`Imported ${repoId}: ${message}`); + } catch (err) { + alert(`Error importing ${repoId}: ${err.message}`); + syncCell.innerHTML = originalSyncContent; + } finally { + btn.disabled = false; + deleteBtn.disabled = false; + syncCell.innerHTML = originalSyncContent; + } + } + + loadSavedRepos(); + loadAllChallenges(); + isImportingRepos = false; + }; + } +} diff --git a/CTFd_github_backup/templates/admin/plugins/github_backup.html b/CTFd_github_backup/templates/admin/plugins/github_backup.html new file mode 100644 index 0000000..3803678 --- /dev/null +++ b/CTFd_github_backup/templates/admin/plugins/github_backup.html @@ -0,0 +1,194 @@ +{# plugins/github_backup/templates/admin/github_backup.html #} +{% extends "admin/base.html" %} +{% block content %} +
+
+

Github backup

+

Import challenges from Github and export to JSON

+
+ + {# Import section #} +
+
+

Import challenges from Github

+
+ +
+
+ {# Auth section #} +
+
+ Connect your GitHub account to import challenges from your public or private repositories. +
+ +

Configure a GitHub App by following the steps in the plugin's README.md file. Then follow the buttons below to install the GitHub App on your GitHub account and link the installation.

+ + + 1. Install GitHub App + + + + 2. Link installation + +
+ + {# Error section #} +
+ +
+ + +
+
+ Github repositories +
+ + + +
+ +
+ + + + +
+ +
+
+ Important +
+ +

The repositories must have a folder named challenges that contains JSON files with the following format:

+ + +
+
+ + {# main #} +
+
+
Import/update settings
+
+
+ +
+ + +
+ + +
+
+
+ +
+
+
Saved Repositories
+
+ +

You can import challenges for each repository individually or select multiple repositories from the table and import them together by clicking the “Import selected repositories” button.

+ +
+ +
+ + + + + + + + + + + + + + + +
Name Last import time Actions
+ +
Loading saved repositories...
+
+ + + +
+
+
+ +
+ + {# Export section #} +
+
+

Export challenges to JSON files

+
+ +
+
+
+ Challenges +
+ +

You can export challenges individually or select several challenges from the table and download a zip file containing a JSON file for each challenge. For challenges that have not been imported from the Github App, a UUID will be generated in the necessary fields.

+ +
+ + + +
+ + + + + + + + + + +
Name Imported from Github Actions
+ + +
+
+
+ + + +
+{% endblock %} + + +{% block scripts %} + {{ super() }} + + + + + +{% endblock %} + diff --git a/CTFd_github_backup/utils.py b/CTFd_github_backup/utils.py new file mode 100644 index 0000000..aac592b --- /dev/null +++ b/CTFd_github_backup/utils.py @@ -0,0 +1,14 @@ +import uuid + +def generate_uuid(): + """ + Generates a new universally unique identifier (UUID). + + This function creates and returns a randomly generated UUID using + the standard library `uuid` module. It ensures a high probability of + uniqueness even across different systems or environments. + + Returns: + UUID: The newly generated UUID object. + """ + return str(uuid.uuid4()) \ No newline at end of file diff --git a/CTFd_github_backup/validate_data.py b/CTFd_github_backup/validate_data.py new file mode 100644 index 0000000..0cd6ea7 --- /dev/null +++ b/CTFd_github_backup/validate_data.py @@ -0,0 +1,107 @@ +# Validate challenges +def validate_challenge_data(data, path): + required_fields = ["uuid", "name", "description", "category", "value", "type", "state"] + + for field in required_fields: + if field not in data: + raise ValueError(f"{path}: Missing required field '{field}'") + + if not isinstance(data["uuid"], str): + raise ValueError(f"{path}: 'uuid' must be a string") + + if not isinstance(data["name"], str) or len(data["name"]) > 80: + raise ValueError(f"{path}: 'name' must be a string of up to 80 characters") + + if not isinstance(data["category"], str) or len(data["category"]) > 80: + raise ValueError(f"{path}: 'category' must be a string of up to 80 characters") + + if not isinstance(data["description"], str): + raise ValueError(f"{path}: 'description' must be a string") + + if not isinstance(data["value"], int) or data["value"] < 0: + raise ValueError(f"{path}: 'value' must be a positive integer") + + if data["type"] not in ["standard"]: + raise ValueError(f"{path}: 'type' is not valid") + + if data["state"] not in ["visible", "hidden"]: + raise ValueError(f"{path}: 'state' is not valid") 
# Validate challenges
def _is_nonneg_int(x):
    """Return True for a non-negative int that is not a bool.

    JSON ``true``/``false`` decode to ``bool``, which is a subclass of
    ``int``, so a plain ``isinstance(x, int)`` check would accept them.
    """
    return isinstance(x, int) and not isinstance(x, bool) and x >= 0


def validate_challenge_data(data, path):
    """Validate the top-level challenge object parsed from a repository JSON file.

    Args:
        data: Parsed JSON object describing one challenge.
        path: Source file path, used only to prefix error messages.

    Returns:
        The validated ``data`` object, unchanged.

    Raises:
        ValueError: If a required field is missing or has an invalid value.
    """
    required_fields = ["uuid", "name", "description", "category", "value", "type", "state"]

    for field in required_fields:
        if field not in data:
            raise ValueError(f"{path}: Missing required field '{field}'")

    if not isinstance(data["uuid"], str):
        raise ValueError(f"{path}: 'uuid' must be a string")

    # CTFd limits name/category columns to 80 characters.
    if not isinstance(data["name"], str) or len(data["name"]) > 80:
        raise ValueError(f"{path}: 'name' must be a string of up to 80 characters")

    if not isinstance(data["category"], str) or len(data["category"]) > 80:
        raise ValueError(f"{path}: 'category' must be a string of up to 80 characters")

    if not isinstance(data["description"], str):
        raise ValueError(f"{path}: 'description' must be a string")

    # Reject bools as well: JSON `true` would otherwise validate as value=1.
    if not _is_nonneg_int(data["value"]):
        raise ValueError(f"{path}: 'value' must be a positive integer")

    if data["type"] not in ["standard"]:
        raise ValueError(f"{path}: 'type' is not valid")

    if data["state"] not in ["visible", "hidden"]:
        raise ValueError(f"{path}: 'state' is not valid")

    return data


def validate_flag_data(flag, path):
    """Validate a single flag object; raise ValueError on the first problem found."""
    required_fields = ["uuid", "type", "content"]

    for field in required_fields:
        if field not in flag:
            raise ValueError(f"{path}: Flag missing required field '{field}'")

    if not isinstance(flag["uuid"], str):
        raise ValueError(f"{path}: 'uuid' of flag must be a string")

    if flag["type"] not in ["static", "regex"]:
        raise ValueError(f"{path}: Flag type not supported")

    if not isinstance(flag["content"], str):
        raise ValueError(f"{path}: Flag content is not valid")

    # 'data' is optional ("" means case-sensitive matching). The original
    # indexed flag["data"] directly even though "data" is not a required
    # field, raising KeyError instead of validating cleanly when absent.
    if flag.get("data", "") not in ["case_insensitive", ""]:
        raise ValueError(f"{path}: Flag data not supported")


def validate_tags_data(tags, path):
    """Validate that ``tags`` is a list of strings."""
    if not isinstance(tags, list):
        raise ValueError(f"{path}: The 'tags' field must be a list.")
    for tag in tags:
        if not isinstance(tag, str):
            raise ValueError(f"{path}: Tags must be strings.")


def validate_hints_data(hints, path):
    """Validate the list of hint objects attached to a challenge.

    Raises:
        ValueError: If ``hints`` is not a list or any hint is malformed.
    """
    if not isinstance(hints, list):
        raise ValueError(f"{path}: The 'hints' field must be a list.")

    for i, hint in enumerate(hints):
        if not isinstance(hint, dict):
            raise ValueError(f"{path}: Each hint must be a JSON object (index {i}).")

        required_fields = ["uuid", "title", "content", "type", "cost"]
        for field in required_fields:
            if field not in hint:
                raise ValueError(f"{path}: Missing required field '{field}' in hint (index {i}).")

        if not isinstance(hint["uuid"], str):
            raise ValueError(f"{path}: The 'uuid' field of the hint (index {i}) must be a string.")

        if not isinstance(hint["content"], str):
            raise ValueError(f"{path}: The 'content' field of the hint (index {i}) must be a string.")

        if not isinstance(hint["type"], str):
            raise ValueError(f"{path}: The 'type' field of the hint (index {i}) must be a string.")

        if not isinstance(hint["title"], str):
            raise ValueError(f"{path}: The 'title' field of the hint (index {i}) must be a string.")

        # Exclude bools explicitly (JSON true/false decode to bool, an int subclass).
        if not isinstance(hint["cost"], int) or isinstance(hint["cost"], bool):
            raise ValueError(f"{path}: The 'cost' field of the hint (index {i}) must be an integer.")


def validate_dynamic_data(dynamic, path):
    """Validate the dynamic-scoring block of a challenge.

    Raises:
        ValueError: If a required field is missing or has an invalid value.
    """
    required_fields = ["initial", "minimum", "decay", "function"]

    for field in required_fields:
        if field not in dynamic:
            raise ValueError(f"{path}: Missing required field '{field}' for dynamic challenge")

    if not _is_nonneg_int(dynamic["initial"]):
        raise ValueError(f"{path}: 'initial' must be a non-negative integer for dynamic challenge")

    if not _is_nonneg_int(dynamic["minimum"]):
        raise ValueError(f"{path}: 'minimum' must be a non-negative integer for dynamic challenge")

    if not _is_nonneg_int(dynamic["decay"]):
        raise ValueError(f"{path}: 'decay' must be a non-negative integer for dynamic challenge")

    if dynamic["function"] not in ["linear", "logarithmic"]:
        raise ValueError(f"{path}: 'function' must be either 'linear' or 'logarithmic' for dynamic challenge")