From 3ba2df52c4fc7d2cbb039dff73742890f2832224 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Tue, 26 Mar 2024 14:53:07 -0700 Subject: [PATCH 01/70] Infrastructure: Support ZFS --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 9eea9fde2..e09dfd853 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,8 @@ RUN apt-get update && \ iproute2 \ iputils-ping \ host \ - htop + htop \ + zfsutils-linux RUN curl -fsSL https://get.docker.com | /bin/sh RUN echo '{ "data-root": "/opt/pwn.college/data/docker", "builder": {"Entitlements": {"security-insecure": true}} }' > /etc/docker/daemon.json From de437d01a45732e6a444326080e308efd05c9586 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Tue, 26 Mar 2024 15:42:03 -0700 Subject: [PATCH 02/70] Workspace: Limit ZFS Storage --- dojo_plugin/api/v1/docker.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dojo_plugin/api/v1/docker.py b/dojo_plugin/api/v1/docker.py index d1e334659..a6af3c273 100644 --- a/dojo_plugin/api/v1/docker.py +++ b/dojo_plugin/api/v1/docker.py @@ -77,6 +77,8 @@ def start_container(user, dojo_challenge, practice): if os.path.exists("/dev/net/tun"): devices.append("/dev/net/tun:/dev/net/tun:rwm") + storage_driver = docker_client.info().get("Driver") + container = docker_client.containers.create( dojo_challenge.image, entrypoint=["/bin/sleep", "6h"], @@ -115,10 +117,11 @@ def start_container(user, dojo_challenge, practice): init=True, cap_add=["SYS_PTRACE"], security_opt=[f"seccomp={SECCOMP}"], + storage_opt=dict(size="256G") if storage_driver == "zfs" else None, cpu_period=100000, cpu_quota=400000, pids_limit=1024, - mem_limit="4000m", + mem_limit="4G", detach=True, auto_remove=True, ) From b071ec7224ecb0919a2957efb98aae7f45162391 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Wed, 27 Mar 2024 00:22:48 -0700 Subject: [PATCH 03/70] unholy production commits --- sshd/Dockerfile | 5 +++-- sshd/auth.py | 36 +++++++++++++++++++++++------------- sshd/start.sh | 6 ++++++ 3 files changed, 32 insertions(+), 15 deletions(-) create mode 100755 sshd/start.sh diff --git a/sshd/Dockerfile b/sshd/Dockerfile index 77ca7c8bb..0a53b832c 100644 --- a/sshd/Dockerfile +++ b/sshd/Dockerfile @@ -4,7 +4,8 @@ RUN apk add --no-cache \ python3 \ py3-pip \ openssh-server-pam \ - docker-cli + docker-cli \ + mysql-client RUN pip3 install --break-system-packages docker @@ -22,4 +23,4 @@ RUN chmod 700 /opt/sshd/auth.py EXPOSE 22 -ENTRYPOINT ["/usr/sbin/sshd.pam", "-D", "-e", "-f", "/opt/sshd/sshd_config"] +ENTRYPOINT ["/opt/sshd/start.sh"] diff --git a/sshd/auth.py b/sshd/auth.py index ebafbc534..2531b1d73 100755 --- a/sshd/auth.py +++ b/sshd/auth.py @@ -2,9 +2,26 @@ import sys import pathlib +import os +import subprocess -import docker +# adamd: insanity to reload the environment varaibles from the docker compose +global_env = "/etc/environment" +if os.path.exists(global_env): + with open(global_env, "r") as f: + for line in f.readlines(): + res = line.strip().split("=", maxsplit=1) + if res and len(res) == 2: + key = res[0] + value = res[1] + os.environ[key] = value + + +DB_HOST = os.environ.get('DB_HOST', "db") +DB_NAME = os.environ.get('DB_NAME', "ctfd") +DB_USER = os.environ.get('DB_USER', "ctfd") +DB_PASS = os.environ.get('DB_PASS', "ctfd") def error(msg): print(msg, file=sys.stderr) @@ -13,20 +30,13 @@ def error(msg): def main(): enter_path = pathlib.Path(__file__).parent.resolve() / "enter.py" - client = docker.from_env() - - try: - container = client.containers.get("db") 
- except docker.errors.NotFound: - error("Error: ctfd is not running!") - result = container.exec_run( - "mysql -pctfd -Dctfd -sNe 'select value, user_id from ssh_keys;'" - ) - if result.exit_code != 0: - error(f"Error: db query exited with code '{result.exit_code}'") + connect_arg = f"-h{DB_HOST}" if DB_HOST else "" + result = subprocess.run(["mysql", connect_arg, f"-p{DB_PASS}", f"-u{DB_USER}", f"-D{DB_NAME}", "-sNe", 'select value, user_id from ssh_keys;'], stdout=subprocess.PIPE) + if result.returncode != 0: + error(f"Error: db query exited with code '{result.returncode}'") - for row in result.output.strip().split(b"\n"): + for row in result.stdout.strip().split(b"\n"): key, user_id = row.decode().split("\t") print(f'command="{enter_path} user_{user_id}" {key}') diff --git a/sshd/start.sh b/sshd/start.sh new file mode 100755 index 000000000..9766c02a3 --- /dev/null +++ b/sshd/start.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +# adamd: hack here so that the auth.py command can get the environment variables we set in the docker compose +printenv | grep -v "no_proxy" >> /etc/environment + +/usr/sbin/sshd.pam -D -e -f /opt/sshd/sshd_config From ad3ea076bc445ca5d2af71dcb221feb9eedb48f0 Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Wed, 27 Mar 2024 18:46:52 -0700 Subject: [PATCH 04/70] Add dojjail --- challenge/Dockerfile | 35 ++++++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index 1efe0c987..66d11c839 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -50,6 +50,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ apt-get clean && rm -rf /var/lib/apt/lists/* ca-certificates curl + netcat-openbsd socat sudo vim @@ -125,6 +126,14 @@ EOF ################################################################################ +FROM builder as builder-dojjail +RUN < Date: Thu, 21 Mar 2024 13:21:35 -0700 Subject: [PATCH 05/70] Working build with new proxy, cached install, and admin hacker build phase --- challenge/Dockerfile | 1 - challenge/windows/challenge-proxy.c | 334 +++++++++++++++++++++++++++ challenge/windows/config_startup.ps1 | 1 + challenge/windows/install.ps1 | 64 ----- challenge/windows/post_install.ps1 | 30 +++ challenge/windows/post_install.sh | 13 ++ challenge/windows/setup.ps1 | 21 +- challenge/windows/startup.ps1 | 1 - 8 files changed, 383 insertions(+), 82 deletions(-) create mode 100644 challenge/windows/challenge-proxy.c create mode 100644 challenge/windows/config_startup.ps1 delete mode 100644 challenge/windows/install.ps1 create mode 100644 challenge/windows/post_install.ps1 create mode 100755 challenge/windows/post_install.sh diff --git a/challenge/Dockerfile b/challenge/Dockerfile index 66d11c839..fec17aac1 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -678,7 +678,6 @@ RUN --security=insecure < +#include +#include + +#pragma comment(lib, "ws2_32.lib") // Winsock Library +#pragma comment(lib, "advapi32.lib") // Svc + +// Note: the 3:1 ratio is required +#define NUM_HANDLES 60 +#define MAX_CLIENTS 20 + +void do_proxy(int sockIndex); +BOOL IsDataAvailable(HANDLE hPipe); +void check_on_pipes(); +void exit_service(); + +void setup_network(int port); + +VOID WINAPI SvcCtrlHandler(DWORD dwCtrl); +HANDLE child_handles[NUM_HANDLES]; +SOCKET client_socket[20]; +fd_set readfds; +SOCKET master; + +struct timeval timeval; + +#define SVCNAME "challengeproxy" +SERVICE_STATUS ServiceStatus; +SERVICE_STATUS_HANDLE hStatus; +void ServiceMain(int argc, char** argv); 
+void ControlHandler(DWORD request); +void InitService(); + + +void exit_service() { + ServiceStatus.dwCurrentState = SERVICE_STOPPED; + SetServiceStatus (hStatus, &ServiceStatus); + exit(1); +} + +void main(int argc, char** argv) { + //Start the control dispatcher thread for the service + SERVICE_TABLE_ENTRY ServiceTable[2]; + ServiceTable[0].lpServiceName = SVCNAME; + ServiceTable[0].lpServiceProc = (LPSERVICE_MAIN_FUNCTION)ServiceMain; + ServiceTable[1].lpServiceName = NULL; + ServiceTable[1].lpServiceProc = NULL; + StartServiceCtrlDispatcher(ServiceTable); +} + +void ServiceMain(int argc, char** argv) { + + hStatus = RegisterServiceCtrlHandler(SVCNAME, SvcCtrlHandler); + + ServiceStatus.dwServiceType = SERVICE_WIN32_OWN_PROCESS; + ServiceStatus.dwServiceSpecificExitCode = 0; + + ServiceStatus.dwCurrentState = SERVICE_START_PENDING; + SetServiceStatus (hStatus, &ServiceStatus); + + InitService(); + ServiceStatus.dwCurrentState = SERVICE_RUNNING; + BOOL res = SetServiceStatus (hStatus, &ServiceStatus); + + SOCKET new_socket, s; + struct sockaddr_in address; + int activity, addrlen, valread; + addrlen = sizeof(struct sockaddr_in); + char buffer[0x1000]; + + while (TRUE) { + // clear the socket set + FD_ZERO(&readfds); + + // add master socket to set + FD_SET(master, &readfds); + int max_sd = master; + + // add child sockets to set + for (int i = 0; i < MAX_CLIENTS; i++) { + // socket descriptor + s = client_socket[i]; + + // if valid socket descriptor then add to read list + if (s > 0) + FD_SET(s, &readfds); + + // highest file descriptor number, need it for the select function + if (s > max_sd) + max_sd = s; + } + + // wait for an activity on one of the sockets, timeout is NULL, so wait + // indefinitely + activity = select(max_sd + 1, &readfds, NULL, NULL, &timeval); + check_on_pipes(); + + if ((activity < 0) && (errno != EINTR)) { + printf("select error"); + exit_service(); + } + + // If something happened on the master socket, then its an incoming + // connection + if (FD_ISSET(master, &readfds)) { + if ((new_socket = accept(master, (struct sockaddr *)&address, + (int *)&addrlen)) < 0) { + perror("accept"); + exit_service(); + } + // printf("New connection, socket fd is %d, ip is : %s, port : %d \n", + // (int) new_socket, inet_ntoa(address.sin_addr), + // ntohs(address.sin_port)); + + // add new socket to array of sockets + for (int i = 0; i < MAX_CLIENTS; i++) { + // if position is empty + if (client_socket[i] == 0) { + client_socket[i] = new_socket; + do_proxy(i); + break; + } + } + } + + // else its some IO operation on some other socket + for (int i = 0; i < MAX_CLIENTS; i++) { + s = client_socket[i]; + + if (FD_ISSET(s, &readfds)) { + // Check if it was for closing, and also read the incoming message + if ((valread = recv(s, buffer, 1024, 0)) == 0) { + // Somebody disconnected, get his details and print + getpeername(s, (struct sockaddr *)&address, (int *)&addrlen); + // printf("Host disconnected, ip %s , port %d \n" , + // inet_ntoa(address.sin_addr) , ntohs(address.sin_port)); + + // Close the socket and mark as 0 in list for reuse + closesocket(s); + client_socket[i] = 0; + } + + // Echo back the message that came in + else { + HANDLE child_stdin = child_handles[i * 3]; + WriteFile(child_stdin, buffer, valread, NULL, NULL); + } + } + } + } + closesocket(s); + WSACleanup(); +} + +void do_proxy(int sockIndex) { + HANDLE g_hChildStd_IN_Rd = NULL; + HANDLE g_hChildStd_IN_Wr = NULL; + HANDLE g_hChildStd_OUT_Rd = NULL; + HANDLE g_hChildStd_OUT_Wr = NULL; + HANDLE g_hChildStd_ERR_Rd 
= NULL; + HANDLE g_hChildStd_ERR_Wr = NULL; + + char *challenge_needle = "Y:\\*.exe"; + char challenge_path[256]; + WIN32_FIND_DATA find_data; + + PROCESS_INFORMATION piProcInfo; + STARTUPINFO siStartInfo; + + SECURITY_ATTRIBUTES saAttr; + + // Set the bInheritHandle flag so pipe handles are inherited + saAttr.nLength = sizeof(SECURITY_ATTRIBUTES); + saAttr.bInheritHandle = TRUE; + saAttr.lpSecurityDescriptor = NULL; + + // Create a pipe for the child process's STDOUT + if (!CreatePipe(&g_hChildStd_OUT_Rd, &g_hChildStd_OUT_Wr, &saAttr, 0)) + puts("Error: Stdout CreatePipe"); + + // Ensure the read handle to the pipe for STDOUT is not inherited + SetHandleInformation(g_hChildStd_OUT_Rd, HANDLE_FLAG_INHERIT, 0); + + // Create a pipe for the child process's STDERR + if (!CreatePipe(&g_hChildStd_ERR_Rd, &g_hChildStd_ERR_Wr, &saAttr, 0)) + puts("Stderr CreatePipe"); + + // Ensure the read handle to the pipe for STDERR is not inherited + SetHandleInformation(g_hChildStd_ERR_Rd, HANDLE_FLAG_INHERIT, 0); + + // Create a pipe for the child process's STDIN + if (!CreatePipe(&g_hChildStd_IN_Rd, &g_hChildStd_IN_Wr, &saAttr, 0)) + puts("Stdin CreatePipe"); + + // Ensure the write handle to the pipe for STDIN is not inherited + SetHandleInformation(g_hChildStd_IN_Wr, HANDLE_FLAG_INHERIT, 0); + + ZeroMemory(&piProcInfo, sizeof(PROCESS_INFORMATION)); + ZeroMemory(&siStartInfo, sizeof(STARTUPINFO)); + siStartInfo.cb = sizeof(STARTUPINFO); + siStartInfo.hStdError = g_hChildStd_ERR_Wr; + siStartInfo.hStdOutput = g_hChildStd_OUT_Wr; + siStartInfo.hStdInput = g_hChildStd_IN_Rd; + siStartInfo.dwFlags |= STARTF_USESTDHANDLES; + + FindFirstFile(challenge_needle, &find_data); + // Create the child process + + sprintf(challenge_path, "Y:\\%s", find_data.cFileName); + + BOOL bSuccess = + CreateProcessA(NULL, + challenge_path, // Command line + NULL, // Process handle not inheritable + NULL, // Thread handle not inheritable + TRUE, // Set handle inheritance to TRUE + 0, // No creation flags + NULL, // Use parent's environment block + NULL, // Use parent's starting directory + &siStartInfo, // Pointer to STARTUPINFO structure + &piProcInfo); // Pointer to PROCESS_INFORMATION structure + + if (!bSuccess) { + puts("CreateProcess"); + exit_service(); + } else { + // Add the pipes to the list + // second socket, i = 1 + // handles 3, 4, 5 = stdin, stdout, stderr + child_handles[sockIndex * 3] = g_hChildStd_IN_Wr; + child_handles[sockIndex * 3 + 1] = g_hChildStd_OUT_Rd; + child_handles[sockIndex * 3 + 2] = g_hChildStd_ERR_Rd; + + // Close handles to the stdin and stdout pipes no longer needed by the child + // process If they are not explicitly closed, there is no way to recognize + // that the child process has ended + CloseHandle(g_hChildStd_OUT_Wr); + CloseHandle(g_hChildStd_ERR_Wr); + CloseHandle(g_hChildStd_IN_Rd); + } +} + +BOOL IsDataAvailable(HANDLE hPipe) { + DWORD bytesAvailable = 0; + BOOL success = PeekNamedPipe(hPipe, NULL, 0, NULL, &bytesAvailable, NULL); + if (!success) { + return FALSE; + } + return bytesAvailable > 0; +} + +void check_on_pipes() { + // Handles the Challenge -> Socket data routing + + HANDLE target; + char buf[0x1000]; + DWORD bytes_read; + + for (int i = 0; i < NUM_HANDLES; i += 3) { + target = child_handles[i + 1]; + if (IsDataAvailable(target)) { + SOCKET socket = client_socket[i / 3]; + ReadFile(target, buf, 0x1000, &bytes_read, NULL); + send(socket, buf, bytes_read, 0); + } + target = child_handles[i + 2]; + if (IsDataAvailable(target)) { + SOCKET socket = client_socket[i / 3]; + 
ReadFile(target, buf, 0x1000, &bytes_read, NULL); + send(socket, buf, bytes_read, 0); + } + } +} + +void InitService() { + WSADATA wsa; + struct sockaddr_in server; + + // initialise all client_socket[] to 0 so not checked + for (int i = 0; i < MAX_CLIENTS; i++) + client_socket[i] = 0; + + for (int i = 0; i < MAX_CLIENTS * 3; i++) + child_handles[i] = 0; + + // Initialize timeval + timeval.tv_sec = 0; + timeval.tv_usec = 1000; + + // Initialise Winsock + if (WSAStartup(MAKEWORD(2, 2), &wsa) != 0) { + printf("Failed. Error Code : %d", WSAGetLastError()); + exit_service(); + } + + // Create a socket + if ((master = socket(AF_INET, SOCK_STREAM, 0)) == INVALID_SOCKET) { + printf("Could not create socket : %d", WSAGetLastError()); + exit_service(); + } + + // Prepare the sockaddr_in structure + server.sin_family = AF_INET; + server.sin_addr.s_addr = INADDR_ANY; + server.sin_port = htons(4001); + + // Bind + if (bind(master, (struct sockaddr *)&server, sizeof(server)) == + SOCKET_ERROR) { + printf("Bind failed with error code : %d", WSAGetLastError()); + exit_service(); + } + + // Listen to incoming connections + listen(master, 3); + printf("Waiting for incoming connections...\n"); +} + +VOID WINAPI SvcCtrlHandler( DWORD dwCtrl ) { + // Handle the requested control code. + + switch(dwCtrl) { + case SERVICE_CONTROL_STOP: + exit_service(); + return; + + case SERVICE_CONTROL_INTERROGATE: + break; + + default: + break; + } + +} diff --git a/challenge/windows/config_startup.ps1 b/challenge/windows/config_startup.ps1 new file mode 100644 index 000000000..188e9a635 --- /dev/null +++ b/challenge/windows/config_startup.ps1 @@ -0,0 +1 @@ +Start-Service sshd diff --git a/challenge/windows/install.ps1 b/challenge/windows/install.ps1 deleted file mode 100644 index 0325886a1..000000000 --- a/challenge/windows/install.ps1 +++ /dev/null @@ -1,64 +0,0 @@ -# install.ps1 -# - Single script for installing user applications used in windows challenge VM -# - Infra/Required installs should be placed in setup.ps1 - -# Wrapper obj used to create shortcuts throughout -$WScriptObj = (New-Object -ComObject ("WScript.Shell")) - -# TODO: Move superfetch disable to setup.ps1 -# Disable Superfetch - prevent windows VM dynamically preloading RAM -Stop-Service -Force -Name "SysMain" -Set-Service -Name "SysMain" -StartupType Disabled - -# Install VCLib dependency -Invoke-WebRequest -Uri https://aka.ms/Microsoft.VCLibs.x64.14.00.Desktop.appx -outfile Microsoft.VCLibs.x86.14.00.Desktop.appx -Add-AppxPackage Microsoft.VCLibs.x86.14.00.Desktop.appx -Remove-Item Microsoft.VCLibs.x86.14.00.Desktop.appx - -# choco friendly installs -# Note: Several packages do not install correctly via choco despite -# being packaged, hence the manual installs below - -# install windbg -(New-Object Net.WebClient).DownloadFile("https://windbg.download.prss.microsoft.com/dbazure/prod/1-2308-2002-0/windbg.msixbundle", "C:\windbg.msixbundle") -add-appxpackage -Path C:\windbg.msixbundle -Remove-Item -Force -Path C:\windbg.msixbundle -$windbg_sc = $WScriptObj.CreateShortcut("C:\Users\hacker\Desktop/windbg.lnk") -$windbg_sc.TargetPath = "C:\Users\Hacker\AppData\Local\Microsoft\WindowsApps\WinDbgX.exe" -$windbg_sc.save() - -if ("INSTALL_IDA_FREE" -eq "yes") { - (New-Object Net.WebClient).DownloadFile("https://out7.hex-rays.com/files/idafree82_windows.exe", "C:\idafree.exe") - Start-Process "C:\idafree.exe" -ArgumentList "--unattendedmodeui minimal --mode unattended --installpassword freeware" -Wait - Remove-Item -Force -Path "C:\idafree.exe" -} - -# install 
Windows Terminal -Invoke-WebRequest -Uri https://github.com/microsoft/terminal/releases/download/v1.7.1091.0/Microsoft.WindowsTerminal_1.7.1091.0_8wekyb3d8bbwe.msixbundle -outfile Microsoft.WindowsTerminal_1.7.1091.0_8wekyb3d8bbwe.msixbundle -Add-AppxPackage -Path .\Microsoft.WindowsTerminal_1.7.1091.0_8wekyb3d8bbwe.msixbundle -Remove-Item Microsoft.WindowsTerminal_1.7.1091.0_8wekyb3d8bbwe.msixbundle - -# x64 Debug -Invoke-WebRequest -Uri https://github.com/x64dbg/x64dbg/releases/download/snapshot/snapshot_2024-02-19_03-16.zip -Outfile x64dbg.zip -Expand-Archive x64dbg.zip -DestinationPath "C:/pwncollege/x64dbg" -Force -Remove-Item x64dbg.zip -$x64dbg_sc = $WScriptObj.CreateShortcut("C:\Users\hacker\Desktop/x64dbg.lnk") -$x64dbg_sc.TargetPath = "C:\pwncollege\x64dbg\release\x96dbg.exe" -$x64dbg_sc.save() - - -# These install correctly with choco, but keeping manual install steps -# Sysinternals -#Invoke-WebRequest -Uri https://download.sysinternals.com/files/SysinternalsSuite.zip -Outfile sysinternals.zip -#Expand-Archive sysinternals.zip -DestinationPath "C:\pwncollege\sysinternals" -Force - -# Process Explorer -#Invoke-WebRequest -Uri https://download.sysinternals.com/files/ProcessExplorer.zip -Outfile procexp.zip -#Expand-Archive procexp.zip -DestinationPath "C:\pwncollege\processExplorer" -Force -#$pe_sc = $WScriptObj.CreateShortcut("C:\Users\hacker\Desktop/Process Explorer.lnk") -#$pe_sc.TargetPath = "C:\pwncollege\procexp64.exe" -#$x64dbg_sc.save() - - -# -- shutdown -- -Stop-Computer -computername localhost -force diff --git a/challenge/windows/post_install.ps1 b/challenge/windows/post_install.ps1 new file mode 100644 index 000000000..f3de9e274 --- /dev/null +++ b/challenge/windows/post_install.ps1 @@ -0,0 +1,30 @@ +# Invokes a Cmd.exe shell script and updates the environment. 
+function Invoke-CmdScript { + param( + [String] $scriptName + ) + $cmdLine = """$scriptName"" $args & set" + & $Env:SystemRoot\system32\cmd.exe /c $cmdLine | + select-string '^([^=]*)=(.*)$' | foreach-object { + $varName = $_.Matches[0].Groups[1].Value + $varValue = $_.Matches[0].Groups[2].Value + set-item Env:$varName $varValue + } +} +Invoke-CmdScript 'C:/Program Files\Microsoft Visual Studio\2022\Community\VC\Auxiliary\Build\vcvarsall.bat' x86_amd64 +Push-Location 'C:/Program Files/Common Files' +cl challenge-proxy.c + +#Copy-Item -Force challenge-proxy.exe "C:\Program Files\Common Files\" + +# Do not use configuration init any further +#Copy-Item -Force startup.ps1 -Destination "C:\Program Files\Common Files\startup.ps1" +#Remove-Item startup.ps1 + +# -- Add New tasks here to run as hacker with admin privileges --- + + +# -- shutdown -- +Set-Service -Name sshd -StartupType Manual +Set-Service -Name tvnserver -StartupType Manual +Stop-Computer -computername localhost diff --git a/challenge/windows/post_install.sh b/challenge/windows/post_install.sh new file mode 100755 index 000000000..c018e1691 --- /dev/null +++ b/challenge/windows/post_install.sh @@ -0,0 +1,13 @@ +#!/usr/bin/bash +#!/usr/bin/bash + +CON="NOPE" +while [[ $CON != *"SSH"* ]]; do + CON=$(netcat -w10 127.0.0.1 2222) + echo $CON +done + +scp -o "StrictHostKeyChecking=no" -P2222 /opt/windows/post_install.ps1 hacker@127.0.0.1: +scp -o "StrictHostKeyChecking=no" -P2222 /opt/windows/startup.ps1 "hacker@127.0.0.1:\"C:/Program Files/Common Files/\"" +scp -o "StrictHostKeyChecking=no" -P2222 /opt/windows/challenge-proxy.c "hacker@127.0.0.1:\"C:/Program Files/Common Files/\"" +ssh -o "StrictHostKeyChecking=no" -p2222 hacker@127.0.0.1 -- ./post_install.ps1 diff --git a/challenge/windows/setup.ps1 b/challenge/windows/setup.ps1 index 1a688c9b9..40dc90e49 100644 --- a/challenge/windows/setup.ps1 +++ b/challenge/windows/setup.ps1 @@ -60,8 +60,6 @@ pnputil.exe /add-driver E:\virtio-win\viofs\2k22\amd64\viofs.inf /install # ...but when we boot up later without the server ISO it will be in D: & "C:\Program Files (x86)\WinFsp\bin\fsreg.bat" virtiofs "D:\virtio-win\viofs\2k22\amd64\virtiofs.exe" "-t %1 -m %2" -Copy-Item A:\startup.ps1 -Destination "C:\Program Files\Common Files\" -& schtasks /create /tn "dojoinit" /sc onstart /delay 0000:00 /rl highest /ru system /tr "powershell.exe -file 'C:\Program Files\Common Files\startup.ps1'" /f # -- install chocolately -- Set-ExecutionPolicy Bypass -Scope Process -Force; [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072; iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1')) @@ -91,20 +89,7 @@ choco install --ignore-detected-reboot tightvnc -y --installArguments 'ADDLOCAL= # this will be done later when the service actually exists #Set-Service -Name tvnserver -StartupType 'Manual' -# -- install rust through rustup (this must be done after MSVC is installed) -- -# WARNING: I learned this the hard way. this binary behaves differently based on argv[0]. -# It must be saved as rustup-init.exe and not rustup.exe. 
-(New-Object Net.WebClient).DownloadFile("https://win.rustup.rs/x86_64", "C:\rustup-init.exe") -& C:\rustup-init.exe --profile minimal -y -Remove-Item "C:\rustup-init.exe" - -Copy-Item -Recurse "A:\challenge-proxy" "C:\Windows\Temp\" -Push-Location "C:\Windows\Temp\challenge-proxy\" -& $env:USERPROFILE\.cargo\bin\cargo build --release -Copy-Item ".\target\release\challenge-proxy.exe" -Destination "C:\Program Files\Common Files\" -Pop-Location -Remove-Item -Force -Recurse "C:\Windows\Temp\challenge-proxy\" -& sc.exe create ChallengeProxy binPath= "C:\Program Files\Common Files\challenge-proxy.exe" displayname= "Challenge Proxy" depend= TcpIp start= auto +& sc.exe create ChallengeProxy binPath="C:\Program Files\Common Files\challenge-proxy.exe" displayname="Challenge Proxy" depend=TcpIp start=auto if (!(Get-NetFirewallRule -Name "ChallengeProxy-In-TCP" -ErrorAction SilentlyContinue | Select-Object Name, Enabled)) { Write-Output "Firewall Rule 'ChallengeProxy-In-TCP' does not exist, creating it..." @@ -264,5 +249,9 @@ Add-Content -Path $env:windir\System32\drivers\etc\hosts -Value "`n$ip`tmsdl.mic $ip = [System.Net.Dns]::GetHostAddresses("public-lumina.hex-rays.com") Add-Content -Path $env:windir\System32\drivers\etc\hosts -Value "`n$ip`tpublic-lumina.hex-rays.com" -Force +# Unfortunately, launching sshd must be set as a startup file and cannot be done done via the service interface in this file +Copy-Item A:\config_startup.ps1 -Destination "C:\Program Files\Common Files\startup.ps1" +& schtasks /create /tn "dojoinit" /sc onstart /delay 0000:00 /rl highest /ru system /tr "powershell.exe -file 'C:\Program Files\Common Files\startup.ps1'" /f + # -- shutdown -- Stop-Computer -computername localhost -force diff --git a/challenge/windows/startup.ps1 b/challenge/windows/startup.ps1 index 71b7cc2a8..c841e61c9 100644 --- a/challenge/windows/startup.ps1 +++ b/challenge/windows/startup.ps1 @@ -38,4 +38,3 @@ if (Test-Path X:\practice-mode-enabled) { Start-Service sshd Start-Service tvnserver -Set-DisplayResolution -Width 1920 -Height 1200 From 6457a4fcb34030261642609c2e286e0097f6f040 Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Thu, 21 Mar 2024 14:14:45 -0700 Subject: [PATCH 06/70] Update Windows Readme --- challenge/windows/README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/challenge/windows/README.md b/challenge/windows/README.md index 8abc70154..2c8cb48db 100644 --- a/challenge/windows/README.md +++ b/challenge/windows/README.md @@ -31,6 +31,17 @@ These are mounted as `Y:` and `Z:` respectively, although they can be configured The challenge mount is also used to pass the flag and information about whether practice mode is enabled to the VM. The startup script will secure the flag in `C:\flag` prior to starting SSH. +## Adding Functionality to the build process + +The build time for the Windows layers is quite long. +As such, consider carefully where in the build process your changes must occur. + +`setup.ps1`: This runs during the windows installation as NT AUTHORITY\SYSTEM. Commands running during this phase may not behave as expected due to execution occurring during installation via Autounattend.xml. + +`post_install.ps1` and `post_install.sh`: These files run **AFTER** the windows installation has completed, and changes will be saved in the docker image. Commands executed in `post_install.ps1` are executed as the `hacker` user while the user is still part of the `Administrators` group. If possible, this is the best location to place changes. 
**WARNING:** it has been observed that using `Copy-Item` can result in invalid/corrupted data being copied to the destination location. + +`startup.ps1`: This is executed during challenge container runtime every time the windows VM is started. This file is also responsible for removing the `hacker` user from the `Administrators` group, dropping permissions. Adding work to this file will increase windows VM startup time and should be avoided if possible. + ## Building process The build process first repackages Red Hat's `virtio-win-tools` CDROM ISO, which contains needed drivers and executables, in the format that windows expects. From 4fd0c10a1c28d21e482390fff0bf3da87fd60068 Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Tue, 26 Mar 2024 23:03:31 -0700 Subject: [PATCH 07/70] remove extraneous windows dir --- windows/Dockerfile | 109 --------------------------------------------- windows/full.yml | 15 ------- 2 files changed, 124 deletions(-) delete mode 100644 windows/Dockerfile delete mode 100644 windows/full.yml diff --git a/windows/Dockerfile b/windows/Dockerfile deleted file mode 100644 index b8cc6c95c..000000000 --- a/windows/Dockerfile +++ /dev/null @@ -1,109 +0,0 @@ -FROM ubuntu:20.04 AS full - -RUN echo 'APT::Install-Recommends 0;' >> /etc/apt/apt.conf.d/01norecommends \ - && echo 'APT::Install-Suggests 0;' >> /etc/apt/apt.conf.d/01norecommends \ - && apt-get update \ - && DEBIAN_FRONTEND=noninteractive apt-get install -y \ - ca-certificates apt-transport-https curl \ - xorriso mtools dosfstools qemu-utils qemu-kvm python3 openssh-client \ - && rm -rf /var/lib/apt/lists/* - -WORKDIR /app - -# IDA Freeware: only permissible for free and open deployments of the dojo! -ARG INSTALL_IDA_FREE=no - -# we create a floppy disk that will hold our Autounattend.xml in the root which will -# allow windows install to proceed automatically. -# We require privileges to create a loop device -COPY ./Autounattend.xml ./setup.ps1 ./startup.ps1 ./sshd_config ./ -COPY ./challenge-proxy ./challenge-proxy -RUN sed -i 's/{INSTALLIDA}/'"$INSTALL_IDA_FREE"'/g' ./setup.ps1 \ - && touch ./practice-mode-enabled \ - && mkfs.fat -F 12 -C ./floppy.img 1440 \ - && mcopy -si ./floppy.img \ - ./Autounattend.xml ./setup.ps1 ./startup.ps1 ./practice-mode-enabled ./sshd_config ./challenge-proxy/ :: \ - && rm -rf ./Autounattend.xml ./setup.ps1 ./startup.ps1 ./practice-mode-enabled ./sshd_config ./challenge-proxy/ - -VOLUME /app/build - -COPY ./shutdown.py . -# we have to repackage the ISO file into the correct format for windows to accept it -# we create a virtio-win directory in the root of the ISO here. This isn't *strictly* -# necessary but it has to correspond with the paths in the Autounattend.xml -# careful... if the drivers can't load for some reason, install will fail with the -# cryptic error "Failed to applay DriveConfiguration". This is because the drivers -# are required to write to the qcow2 image. 
-CMD \ - ( [ -f ./build/virtio-win-processed.iso ] || ( \ - echo "Building virtio drivers disk" \ - && curl -Lo virtio-win-raw.iso \ - 'https://fedorapeople.org/groups/virt/virtio-win/direct-downloads/latest-virtio/virtio-win.iso' \ - && WORKDIR="$(mktemp -d --suffix=.img-extract)" \ - && EXTRACT_DIR="$WORKDIR/virtio-win" \ - && mkdir -p "$EXTRACT_DIR" \ - && xorriso -report_about SORRY -osirrox on -indev ./virtio-win-raw.iso -extract / "$EXTRACT_DIR" \ - && rm ./virtio-win-raw.iso \ - && xorriso \ - -as genisoimage -rock -joliet -volid VIRTIO \ - -output ./build/virtio-win-processed.iso \ - "$WORKDIR" \ - && rm -rf "$WORKDIR" virtio-win-raw.iso \ - ) ) \ - && ( [ -f ./build/server-2022.iso ] || ( \ - echo "Downloading Windows Server ISO" \ - && curl -Lo ./build/server-2022.iso \ - 'https://software-static.download.prss.microsoft.com/sg/download/888969d5-f34g-4e03-ac9d-1f9786c66749/SERVER_EVAL_x64FRE_en-us.iso' \ - ) ) \ - && ( [ -f ./build/image-stage1-complete ] || ( \ - echo "Creating VM image" \ - && rm -rf ./build/clean.qcow2 ./build/image-built \ - && qemu-img create -f qcow2 ./build/clean.qcow2 51200M \ - && echo "Installing and configuring windows (this will take a while)" \ - && qemu-system-x86_64 \ - -name dojo \ - -boot once=d \ - -machine type=pc,accel=kvm \ - -m 4096M \ - -smp "$(nproc)" \ - -display vnc=:12 \ - -nographic \ - -device virtio-net,netdev=user.0 \ - -netdev user,id=user.0,hostfwd=tcp::5985-:5985,hostfwd=tcp::2222-:22 \ - -serial null \ - `#-monitor unix:./build/monitor.sock,server,nowait` \ - -drive file=./floppy.img,format=raw,index=0,if=floppy \ - -drive "file=./build/server-2022.iso,media=cdrom" \ - -drive "file=./build/virtio-win-processed.iso,media=cdrom" \ - -drive file=./build/clean.qcow2,if=virtio,cache=writeback,discard=ignore,format=qcow2 \ - && rm -rf ./build/monitor.sock \ - && touch ./build/image-stage1-complete \ - ) ) \ - && ( [ -f ./build/image-built ] || ( \ - echo "Performing initial bootup" \ - && ( \ - qemu-system-x86_64 \ - -name dojo \ - -boot once=d \ - -machine type=pc,accel=kvm \ - -m 4096M \ - -smp "$(nproc)" \ - -display vnc=:12 \ - -nographic \ - -device virtio-net,netdev=user.0 \ - -netdev user,id=user.0,hostfwd=tcp::5985-:5985,hostfwd=tcp::2222-:22 \ - -serial null \ - `#-monitor unix:./build/monitor.sock,server,nowait` \ - -drive file=./floppy.img,format=raw,index=0,if=floppy \ - -drive "file=./build/server-2022.iso,media=cdrom" \ - -drive "file=./build/virtio-win-processed.iso,media=cdrom" \ - -drive file=./build/clean.qcow2,if=virtio,cache=writeback,discard=ignore,format=qcow2 \ - & python3 ./shutdown.py \ - ) && rm -f ./build/monitor.sock \ - && touch ./build/image-built \ - && echo "Windows image built" \ - ) ) - -FROM ubuntu:20.04 AS none - -CMD true diff --git a/windows/full.yml b/windows/full.yml deleted file mode 100644 index 31dc1e428..000000000 --- a/windows/full.yml +++ /dev/null @@ -1,15 +0,0 @@ -services: - windows: - build: - context: ./ - target: ${WINDOWS_VM} - args: - - INSTALL_IDA_FREE=${INSTALL_IDA_FREE} - platform: linux/amd64 - devices: - - /dev/kvm - volumes: - - windows:/app/build - ports: - - "5912:5912" - stop_signal: SIGKILL From 8d0980f28d2e4379a36ab69cbe166101e780c84f Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Wed, 27 Mar 2024 22:20:44 -0700 Subject: [PATCH 08/70] Pave the way so that we can just change the config file to host the database on another server --- docker-compose.yml | 15 ++++++++++----- script/container-setup.sh | 4 ++++ script/dojo | 11 ++++++++--- 3 files changed, 22 insertions(+), 8 
deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 00666e93e..64cd22e98 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -60,7 +60,7 @@ services: hard: 1048576 environment: - UPLOAD_FOLDER=/var/uploads - - DATABASE_URL=mysql+pymysql://ctfd:ctfd@db/ctfd + - DATABASE_URL=mysql+pymysql://${DB_USER}:${DB_PASS}@${DB_HOST}/${DB_NAME} - REDIS_URL=redis://cache:6379 - WORKERS=8 - LOG_FOLDER=/var/log/CTFd @@ -122,10 +122,10 @@ services: image: mariadb:10.4.12 restart: always environment: - - MYSQL_ROOT_PASSWORD=ctfd - - MYSQL_USER=ctfd - - MYSQL_PASSWORD=ctfd - - MYSQL_DATABASE=ctfd + - MYSQL_ROOT_PASSWORD=${DB_PASS} + - MYSQL_USER=${DB_USER} + - MYSQL_PASSWORD=${DB_PASS} + - MYSQL_DATABASE=${DB_NAME} volumes: - ./data/mysql:/var/lib/mysql command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --wait_timeout=28800, --log-warnings=0] @@ -149,6 +149,11 @@ services: volumes: - ./data/ssh_host_keys:/etc/ssh:ro - /var/run/docker.sock:/var/run/docker.sock:ro + environment: + - DB_HOST=${DB_HOST} + - DB_NAME=${DB_NAME} + - DB_USER=${DB_USER} + - DB_PASS=${DB_PASS} ports: - "22:22" diff --git a/script/container-setup.sh b/script/container-setup.sh index a0aa7a031..f453bca8d 100755 --- a/script/container-setup.sh +++ b/script/container-setup.sh @@ -35,6 +35,10 @@ define INSTALL_DESKTOP_BASE yes # matches the challenge-mini configuration define INSTALL_IDA_FREE no # explicitly disable -- only for free dojos define INSTALL_BINJA_FREE no # explicitly disable -- only for free dojos define INSTALL_WINDOWS no # explicitly disable +define DB_HOST db +define DB_NAME ctfd +define DB_USER ctfd +define DB_PASS ctfd mv $DOJO_DIR/data/.config.env $DOJO_DIR/data/config.env . $DOJO_DIR/data/config.env diff --git a/script/dojo b/script/dojo index e398c43cf..a26bea737 100755 --- a/script/dojo +++ b/script/dojo @@ -13,6 +13,11 @@ fi DOCKER_ARGS=${DOCKER_ARGS:--i} [ -t 0 ] && DOCKER_ARGS="-t $DOCKER_ARGS" +CONTAINER_WITH_MYSQL=sshd + +DOJO_DIR=/opt/pwn.college +. $DOJO_DIR/data/config.env + case "$ACTION" in # HELP: update: update dojo files (warning: does `git pull`), rebuild containers, and restart any changed services "update") @@ -58,20 +63,20 @@ case "$ACTION" in # HELP: db: launch a mysql client session, connected to the ctfd db "db") - docker exec $DOCKER_ARGS db mysql -pctfd -Dctfd ctfd "$@" + docker exec $DOCKER_ARGS ${CONTAINER_WITH_MYSQL} mysql -h ${DB_HOST} -p${DB_PASS} -D${DB_NAME} -u${DB_USER} "$@" ;; # HELP: backup: does a dojo db backup into the `data/backups` directory. "backup") mkdir -p data/backups - docker exec db mysqldump -pctfd --single-transaction --routines --triggers ctfd | gzip > "data/backups/db-$(date -Iseconds).sql.gz" + docker exec ${CONTAINER_WITH_MYSQL} mysqldump -h ${DB_HOST} -p${DB_PASS} -u${DB_USER} --single-transaction --routines --triggers ${DB_NAME} | gzip > "data/backups/db-$(date -Iseconds).sql.gz" ;; # HELP: restore PATH: restores a dojo db backup. 
Path arg is relative to the `data/backups` directory "restore") BACKUP_PATH="data/backups/$1" if [ -f "$BACKUP_PATH" ]; then - gunzip < "$BACKUP_PATH" | docker exec -i db mysql -pctfd -Dctfd + gunzip < "$BACKUP_PATH" | docker exec -i ${CONTAINER_WITH_MYSQL} mysql -h ${DB_HOST} -p${DB_PASS} -u${DB_USER} -D${DB_NAME} else echo "Error: missing file to restore from" >&2 fi From d6d48ef660348d2ffa5092d5bc0b4458e809404c Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Thu, 28 Mar 2024 00:59:37 -0700 Subject: [PATCH 09/70] Windows - Bring back Set-Resolution --- challenge/windows/startup.ps1 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/challenge/windows/startup.ps1 b/challenge/windows/startup.ps1 index c841e61c9..d5afbe5af 100644 --- a/challenge/windows/startup.ps1 +++ b/challenge/windows/startup.ps1 @@ -36,5 +36,7 @@ if (Test-Path X:\practice-mode-enabled) { Add-LocalGroupMember -Group "Administrators" -Member hacker } +logoff 1 + Start-Service sshd Start-Service tvnserver From 5399544db88fa2a4b82b1b14722c414a4b46435a Mon Sep 17 00:00:00 2001 From: Wum1ng <69114599+wumingzhilian@users.noreply.github.com> Date: Thu, 28 Mar 2024 17:20:26 +0800 Subject: [PATCH 10/70] New function to remove challenge in dojos.py (#346) * New function to remove challenge in dojos.py * Set the delete function to be available only to ctfd administrators * Changing a get request to a post request * add test_delete_dojo function in test_running.py * fix ci bug * trigger workflow action * recovery view_dojo_activity auth. --------- Co-authored-by: Pengyu Ding --- dojo_plugin/pages/dojos.py | 21 +++++++++++++++++ dojo_theme/static/js/dojo/settings.js | 33 +++++++++++++++++++++++++-- dojo_theme/templates/dojo_admin.html | 11 +++++++++ test/test_running.py | 8 +++++++ 4 files changed, 71 insertions(+), 2 deletions(-) diff --git a/dojo_plugin/pages/dojos.py b/dojo_plugin/pages/dojos.py index 09958fb10..fad249fd9 100644 --- a/dojo_plugin/pages/dojos.py +++ b/dojo_plugin/pages/dojos.py @@ -112,6 +112,27 @@ def update_dojo(dojo, update_code=None): return {"success": False, "error": str(e)}, 400 return {"success": True} +@dojos.route("/dojo//delete/", methods=["POST"]) +@authed_only +def delete_dojo(dojo): + dojo = Dojos.from_id(dojo).first() + if not dojo: + return {"success": False, "error": "Not Found"}, 404 + + # Check if the current user is an admin of the dojo + if not is_admin(): + abort(403) + + try: + DojoUsers.query.filter(DojoUsers.dojo_id == dojo.dojo_id).delete() + Dojos.query.filter(Dojos.dojo_id == dojo.dojo_id).delete() + db.session.commit() + except Exception as e: + db.session.rollback() + print(f"ERROR: Dojo failed for {dojo}", file=sys.stderr, flush=True) + traceback.print_exc(file=sys.stderr) + return {"success": False, "error": str(e)}, 400 + return {"success": True} @dojos.route("/dojo//admin/") @dojo_route diff --git a/dojo_theme/static/js/dojo/settings.js b/dojo_theme/static/js/dojo/settings.js index ce1b16eb8..05b2e9e78 100644 --- a/dojo_theme/static/js/dojo/settings.js +++ b/dojo_theme/static/js/dojo/settings.js @@ -44,6 +44,35 @@ function form_fetch_and_show(name, endpoint, method, success_message, confirm_ms }); } +function button_fetch_and_show(name, endpoint, method,data, success_message, confirm_msg=null) { + const button = $(`#${name}-button`); + const results = $(`#${name}-results`); + + button.click(()=>{ + results.empty(); + if (confirm_msg && !confirm(confirm_msg(data))) return; + CTFd.fetch(endpoint, { + method: method, + credentials: "same-origin", + headers: { + Accept: 
"application/json", + "Content-Type": "application/json" + }, + body: JSON.stringify(data) + }).then(response => { + return response.json() + }).then(result => { + if (result.success) { + results.html(success_template); + results.find("#message").text(success_message); + } else { + results.html(error_template); + results.find("#message").html(result.error); + } + }); + }); +} + $(() => { form_fetch_and_show("ssh-key", "/pwncollege_api/v1/ssh_key", "PATCH", "Your public key has been updated"); form_fetch_and_show("dojo-create", "/pwncollege_api/v1/dojo/create", "POST", "Your dojo has been created"); @@ -57,7 +86,7 @@ $(() => { form_fetch_and_show("dojo-award-prune", `/pwncollege_api/v1/dojo/${init.dojo}/prune-awards`, "POST", "Legacy awards have been pruned.", confirm_msg = (form, params) => { return `Prune all awarded emoji based on updated completion requirements?`; }); - + button_fetch_and_show("dojo-delete", `/dojo/${init.dojo}/delete/`, "POST", {dojo: init.dojo} ,"Dojo has been deleted.",x=> `Are you sure you want to delete the dojo "${x.dojo}"? This action cannot be undone.`); $(".copy-button").click((event) => { let input = $(event.target).parents(".input-group").children("input")[0]; input.select(); @@ -71,7 +100,7 @@ $(() => { $(event.target).tooltip("show"); setTimeout(function() { - $(event.target).tooltip("hide"); + $(event.target).tooltip("hide"); }, 1500); }) }); diff --git a/dojo_theme/templates/dojo_admin.html b/dojo_theme/templates/dojo_admin.html index 162ffc93d..ead7b8b08 100644 --- a/dojo_theme/templates/dojo_admin.html +++ b/dojo_theme/templates/dojo_admin.html @@ -9,6 +9,7 @@

{{ dojo.name }}

+
@@ -28,6 +29,16 @@

{{ dojo.name }}

+ {% if is_admin() %} + + {% endif %}
diff --git a/test/test_running.py b/test/test_running.py index bd74e29ab..a9110e159 100644 --- a/test/test_running.py +++ b/test/test_running.py @@ -77,6 +77,14 @@ def test_create_dojo(example_dojo, admin_session): assert admin_session.get(f"{PROTO}://{HOST}/example/").status_code == 200 +@pytest.mark.dependency(depends=["test_create_dojo"]) +def test_delete_dojo(admin_session): + reference_id = create_dojo_yml("""id: delete-test""", session=admin_session) + assert admin_session.get(f"{PROTO}://{HOST}/{reference_id}/").status_code == 200 + assert admin_session.post(f"{PROTO}://{HOST}/dojo/{reference_id}/delete/", json={"dojo": reference_id}).status_code == 200 + assert admin_session.get(f"{PROTO}://{HOST}/{reference_id}/").status_code == 404 + + @pytest.mark.dependency(depends=["test_create_dojo"]) def test_create_import_dojo(example_import_dojo, admin_session): assert admin_session.get(f"{PROTO}://{HOST}/{example_import_dojo}/").status_code == 200 From faab8c0e7a4bb42131e84a341d062f1aa0544d66 Mon Sep 17 00:00:00 2001 From: Yan Date: Wed, 27 Mar 2024 12:52:52 -0700 Subject: [PATCH 11/70] move raw-yaml dojo creation into the normal dojo endpoint (still gated by is_admin), create directories for raw-yaml dojos --- dojo_plugin/api/v1/dojo.py | 50 ++++++++++++-------------------------- dojo_plugin/utils/dojo.py | 8 ++++++ test/utils.py | 2 +- 3 files changed, 24 insertions(+), 36 deletions(-) diff --git a/dojo_plugin/api/v1/dojo.py b/dojo_plugin/api/v1/dojo.py index e344ba6af..25f03c050 100644 --- a/dojo_plugin/api/v1/dojo.py +++ b/dojo_plugin/api/v1/dojo.py @@ -25,45 +25,30 @@ from CTFd.utils.security.sanitize import sanitize_html from ...models import Dojos, DojoMembers, DojoAdmins, DojoUsers, Emojis -from ...utils.dojo import dojo_accessible, dojo_clone, dojo_from_dir, dojo_from_spec, dojo_route, dojo_admins_only +from ...utils.dojo import dojo_accessible, dojo_clone, dojo_from_dir, dojo_yml_dir, dojo_route, dojo_admins_only dojo_namespace = Namespace( "dojo", description="Endpoint to manage dojos" ) -def create_dojo_yml(user, spec): - DOJO_EXISTS = "This repository already exists as a dojo" - try: - dojo = dojo_from_spec(spec) - dojo.admins = [DojoAdmins(user=user)] - - db.session.add(dojo) - db.session.commit() - except IntegrityError as e: - return {"success": False, "error": DOJO_EXISTS}, 400 - - except AssertionError as e: - return {"success": False, "error": str(e)}, 400 - - except Exception as e: - print("ERROR: Dojo from spec failed", file=sys.stderr, flush=True) - traceback.print_exc(file=sys.stderr) - return {"success": False, "error": str(e)}, 400 - - return {"success": True, "dojo": dojo.reference_id}, 200 - -def create_dojo(user, repository, public_key, private_key): +def create_dojo(user, repository, public_key, private_key, spec): DOJO_EXISTS = "This repository already exists as a dojo" try: - repository_re = r"[\w\-]+/[\w\-]+" - repository = repository.replace("https://github.com/", "") - assert re.match(repository_re, repository), f"Invalid repository, expected format: {repository_re}" + if repository: + repository_re = r"[\w\-]+/[\w\-]+" + repository = repository.replace("https://github.com/", "") + assert re.match(repository_re, repository), f"Invalid repository, expected format: {repository_re}" + + assert not Dojos.query.filter_by(repository=repository).first(), DOJO_EXISTS - assert not Dojos.query.filter_by(repository=repository).first(), DOJO_EXISTS + dojo_dir = dojo_clone(repository, private_key) + elif spec: + assert is_admin(), "Must be an admin user to create 
dojos from spec rather than repositories" + dojo_dir = dojo_yml_dir(spec) + repository, public_key, private_key = None, None, None - dojo_dir = dojo_clone(repository, private_key) dojo_path = pathlib.Path(dojo_dir.name) dojo = dojo_from_dir(dojo_path) @@ -141,12 +126,6 @@ def post(self, dojo): db.session.commit() return {"success": True} -@dojo_namespace.route("/create-spec") -class CreateDojoSpec(Resource): - @admins_only - def post(self): - return create_dojo_yml(get_current_user(), yaml.safe_load(request.get_json()["spec"])) - @dojo_namespace.route("/create") class CreateDojo(Resource): @authed_only @@ -155,6 +134,7 @@ def post(self): user = get_current_user() repository = data.get("repository", "") + spec = data.get("spec", "") public_key = data.get("public_key", "") private_key = data.get("private_key", "").replace("\r\n", "\n") @@ -164,7 +144,7 @@ def post(self): if not is_admin() and cache.get(key) is not None: return {"success": False, "error": "You can only create 1 dojo per day."}, 429 - result = create_dojo(user, repository, public_key, private_key) + result = create_dojo(user, repository, public_key, private_key, spec) if result[0]["success"]: cache.set(key, 1, timeout=timeout) diff --git a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index e96dddb72..cbe125436 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -337,6 +337,14 @@ def generate_ssh_keypair(): return (public_key.read_text().strip(), private_key.read_text()) +def dojo_yml_dir(spec): + tmp_dojos_dir = DOJOS_DIR / "tmp" + tmp_dojos_dir.mkdir(exist_ok=True) + yml_dir = tempfile.TemporaryDirectory(dir=tmp_dojos_dir) # TODO: ignore_cleanup_errors=True + yml_dir_path = pathlib.Path(yml_dir.name) + with open(yml_dir_path / "dojo.yml", "w") as do: + do.write(spec) + return yml_dir def dojo_clone(repository, private_key): tmp_dojos_dir = DOJOS_DIR / "tmp" diff --git a/test/utils.py b/test/utils.py index 5738725c7..2fba0d89f 100644 --- a/test/utils.py +++ b/test/utils.py @@ -46,7 +46,7 @@ def create_dojo(repository, *, session): return dojo_reference_id def create_dojo_yml(spec, *, session): - response = session.post(f"{PROTO}://{HOST}/pwncollege_api/v1/dojo/create-spec", json={"spec": spec}) + response = session.post(f"{PROTO}://{HOST}/pwncollege_api/v1/dojo/create", json={"spec": spec}) assert response.status_code == 200, f"Expected status code 200, but got {response.status_code} - {response.json()}" dojo_reference_id = response.json()["dojo"] return dojo_reference_id From 9087fbd973ebb318ec609eeb2844bf9c76430b67 Mon Sep 17 00:00:00 2001 From: Yan Date: Wed, 27 Mar 2024 13:48:51 -0700 Subject: [PATCH 12/70] implement dojo-lfs, allowing large files to be downloaded into a dojo post-clone --- dojo_plugin/utils/dojo.py | 21 +++++++++++++++++++++ test/conftest.py | 4 ++++ test/dojos/lfs_dojo.yml | 10 ++++++++++ test/test_running.py | 10 ++++++++++ 4 files changed, 45 insertions(+) create mode 100644 test/dojos/lfs_dojo.yml diff --git a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index cbe125436..a0fe6ed52 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -7,6 +7,7 @@ import contextlib import inspect import pathlib +import urllib.request import yaml import requests @@ -25,6 +26,8 @@ UNIQUE_ID_REGEX = Regex(r"^[a-z0-9-~]{1,128}$") NAME_REGEX = Regex(r"^[\S ]{1,128}$") IMAGE_REGEX = Regex(r"^[\S]{1,256}$") +FILE_PATH_REGEX = Regex(r"^[A-Za-z0-9_][A-Za-z0-9-_./]*$") +FILE_URL_REGEX = 
Regex(r"^https://www.dropbox.com/[a-zA-Z0-9]*/[a-zA-Z0-9]*/[a-zA-Z0-9]*/[a-zA-Z0-9.-_]*?rlkey=[a-zA-Z0-9]*&dl=1") DATE = Use(datetime.datetime.fromisoformat) ID_NAME_DESCRIPTION = { @@ -106,6 +109,13 @@ }, )], }], + Optional("files", default=[]): [ + { + "type": "download", + "path": FILE_PATH_REGEX, + "url": FILE_URL_REGEX, + } + ], }) def setdefault_name(entry): @@ -169,6 +179,16 @@ def load_dojo_subyamls(data, dojo_dir): return data +def dojo_initialize_files(data, dojo_dir): + for dojo_file in data.get("files", []): + rel_path = dojo_dir / dojo_file["path"] + abs_path = dojo_dir / rel_path + assert not abs_path.is_symlink(), f"{rel_path} is a symbolic link!" + if dojo_file["type"] == "download": + abs_path.parent.mkdir(parents=True, exist_ok=True) + urllib.request.urlretrieve(dojo_file["url"], str(abs_path)) + assert abs_path.stat().st_size >= 50*1024*1024, f"{rel_path} is small enough to fit into git ({abs_path.stat().st_size} bytes) --- put it in the repository!" + def dojo_from_dir(dojo_dir, *, dojo=None): dojo_yml_path = dojo_dir / "dojo.yml" assert dojo_yml_path.exists(), "Missing file: `dojo.yml`" @@ -178,6 +198,7 @@ def dojo_from_dir(dojo_dir, *, dojo=None): data_raw = yaml.safe_load(dojo_yml_path.read_text()) data = load_dojo_subyamls(data_raw, dojo_dir) + dojo_initialize_files(data, dojo_dir) return dojo_from_spec(data, dojo_dir=dojo_dir, dojo=dojo) def dojo_from_spec(data, *, dojo_dir=None, dojo=None): diff --git a/test/conftest.py b/test/conftest.py index 658a25bc4..b394dfc69 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -74,6 +74,10 @@ def no_import_challenge_dojo(admin_session): def no_practice_dojo(admin_session): return create_dojo_yml(open(TEST_DOJOS_LOCATION / "no_practice_dojo.yml").read(), session=admin_session) +@pytest.fixture(scope="session") +def lfs_dojo(admin_session): + return create_dojo_yml(open(TEST_DOJOS_LOCATION / "lfs_dojo.yml").read(), session=admin_session) + @pytest.fixture(scope="session") def welcome_dojo(admin_session): try: diff --git a/test/dojos/lfs_dojo.yml b/test/dojos/lfs_dojo.yml new file mode 100644 index 000000000..52148db75 --- /dev/null +++ b/test/dojos/lfs_dojo.yml @@ -0,0 +1,10 @@ +id: lfs +type: public +modules: + - id: test + challenges: + - id: test +files: + - type: download + url: "https://www.dropbox.com/scl/fi/deyhfwioo3d824ext4k42/dojo.txt?rlkey=h1wmm4oe9hq67ooan9oh2r36j&dl=1" + path: "test/test/dojo.txt" diff --git a/test/test_running.py b/test/test_running.py index a9110e159..e6f26f8a6 100644 --- a/test/test_running.py +++ b/test/test_running.py @@ -148,6 +148,16 @@ def test_no_practice(no_practice_challenge_dojo, no_practice_dojo, random_user): assert not response.json()["success"] assert "practice" in response.json()["error"] +@pytest.mark.dependency(depends=["test_join_dojo"]) +def test_lfs(lfs_dojo, random_user): + uid, session = random_user + assert session.get(f"{PROTO}://{HOST}/dojo/{lfs_dojo}/join/").status_code == 200 + start_challenge(lfs_dojo, "test", "test", session=session) + try: + workspace_run("[ -f '/challenge/dojo.txt' ]", user=uid) + except subprocess.CalledProcessError: + assert False, "LFS didn't create dojo.txt" + @pytest.mark.dependency(depends=["test_join_dojo"]) def test_no_import(no_import_challenge_dojo, admin_session): try: From adf053041e0f862310f2651360e6c825c4d05939 Mon Sep 17 00:00:00 2001 From: Yan Date: Thu, 28 Mar 2024 01:59:12 -0700 Subject: [PATCH 13/70] gate LFS behind admin privs --- dojo_plugin/utils/dojo.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index a0fe6ed52..e4c0aa38f 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -181,6 +181,7 @@ def load_dojo_subyamls(data, dojo_dir): def dojo_initialize_files(data, dojo_dir): for dojo_file in data.get("files", []): + assert is_admin(), f"LFS support requires admin privileges" rel_path = dojo_dir / dojo_file["path"] abs_path = dojo_dir / rel_path assert not abs_path.is_symlink(), f"{rel_path} is a symbolic link!" From 0c09c8cc3afb01c878dd0499b280a0bae6203cd1 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Fri, 29 Mar 2024 09:22:37 -0700 Subject: [PATCH 14/70] Dojo: Increase admin visibility (#369) --- dojo_plugin/pages/dojos.py | 12 ++++-------- dojo_plugin/utils/dojo.py | 2 +- dojo_theme/templates/dojo.html | 2 +- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/dojo_plugin/pages/dojos.py b/dojo_plugin/pages/dojos.py index fad249fd9..f0b640ed3 100644 --- a/dojo_plugin/pages/dojos.py +++ b/dojo_plugin/pages/dojos.py @@ -13,7 +13,7 @@ from ..models import DojoAdmins, DojoChallenges, DojoMembers, DojoModules, DojoUsers, Dojos from ..utils import user_dojos -from ..utils.dojo import dojo_route, generate_ssh_keypair, dojo_update +from ..utils.dojo import dojo_route, generate_ssh_keypair, dojo_update, dojo_admins_only dojos = Blueprint("pwncollege_dojos", __name__) @@ -136,18 +136,15 @@ def delete_dojo(dojo): @dojos.route("/dojo//admin/") @dojo_route +@dojo_admins_only def view_dojo_admin(dojo): - if not dojo.is_admin(): - abort(403) return render_template("dojo_admin.html", dojo=dojo, is_admin=is_admin) @dojos.route("/dojo//admin/activity") @dojo_route +@dojo_admins_only def view_dojo_activity(dojo): - if not dojo.is_admin(): - abort(403) - docker_client = docker.from_env() filters = { "name": "user_", @@ -179,9 +176,8 @@ def view_dojo_activity(dojo): @dojos.route("/dojo//admin/solves.csv") @dojo_route +@dojo_admins_only def view_dojo_solves(dojo): - if not dojo.is_admin(): - abort(403) def stream(): yield "user,module,challenge,time\n" solves = ( diff --git a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index e4c0aa38f..47ef71a70 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -425,7 +425,7 @@ def wrapper(*args, **kwargs): bound_args.apply_defaults() dojo = bound_args.arguments["dojo"] - if not dojo.is_admin(get_current_user()): + if not (dojo.is_admin(get_current_user()) or is_admin()): abort(403) return func(*bound_args.args, **bound_args.kwargs) return wrapper diff --git a/dojo_theme/templates/dojo.html b/dojo_theme/templates/dojo.html index 3d3e3218e..1469711f0 100644 --- a/dojo_theme/templates/dojo.html +++ b/dojo_theme/templates/dojo.html @@ -25,7 +25,7 @@

{{ dojo.name or dojo.id }}

{% endif %}
-{% if dojo_user.type == "admin" %}
+{% if dojo_user.type == "admin" or user.type == "admin" %}
From 790f74640f52ed53db8b170c55e0a2b4b4b30de9 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 09:58:01 -0700 Subject: [PATCH 15/70] Allow better support of externally hosted DBs. This way, the docker-compose.yml file does not need to change to support an external db. --- db/Dockerfile | 6 ++++++ db/start.sh | 8 ++++++++ docker-compose.yml | 8 +++++--- script/container-setup.sh | 1 + script/dojo | 2 +- 5 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 db/Dockerfile create mode 100755 db/start.sh diff --git a/db/Dockerfile b/db/Dockerfile new file mode 100644 index 000000000..b6f4bd731 --- /dev/null +++ b/db/Dockerfile @@ -0,0 +1,6 @@ +FROM mariadb:10.4.12 + +COPY ./start.sh /start.sh +RUN chmod +x /start.sh +USER mysql + diff --git a/db/start.sh b/db/start.sh new file mode 100755 index 000000000..30324c270 --- /dev/null +++ b/db/start.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +if [ "${DB_EXTERNAL:-no}" = "no" ] +then + mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --wait_timeout=28800 --log-warnings=0 +else + while true; do sleep 86400; done +fi diff --git a/docker-compose.yml b/docker-compose.yml index 64cd22e98..2368aa9cd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -119,18 +119,20 @@ services: db: container_name: db - image: mariadb:10.4.12 + build: + context: ./db restart: always environment: - MYSQL_ROOT_PASSWORD=${DB_PASS} - MYSQL_USER=${DB_USER} - MYSQL_PASSWORD=${DB_PASS} - MYSQL_DATABASE=${DB_NAME} + - DB_EXTERNAL=${DB_EXTERNAL} volumes: - ./data/mysql:/var/lib/mysql - command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --wait_timeout=28800, --log-warnings=0] + command: [/start.sh] healthcheck: - test: ["CMD", "mysqladmin", "ping", "-pctfd"] + test: ["CMD", "mysqladmin", "ping", "-p${DB_PASS}", "-u${DB_USER}", "-h${DB_HOST}"] interval: 10s timeout: 10s retries: 3 diff --git a/script/container-setup.sh b/script/container-setup.sh index f453bca8d..9d51ab748 100755 --- a/script/container-setup.sh +++ b/script/container-setup.sh @@ -39,6 +39,7 @@ define DB_HOST db define DB_NAME ctfd define DB_USER ctfd define DB_PASS ctfd +define DB_EXTERNAL no # change to anything but no and the db container will not start mysql mv $DOJO_DIR/data/.config.env $DOJO_DIR/data/config.env . $DOJO_DIR/data/config.env diff --git a/script/dojo b/script/dojo index a26bea737..db585b061 100755 --- a/script/dojo +++ b/script/dojo @@ -13,7 +13,7 @@ fi DOCKER_ARGS=${DOCKER_ARGS:--i} [ -t 0 ] && DOCKER_ARGS="-t $DOCKER_ARGS" -CONTAINER_WITH_MYSQL=sshd +CONTAINER_WITH_MYSQL=db DOJO_DIR=/opt/pwn.college . $DOJO_DIR/data/config.env From de9e8e1a51951424a06d7f3387cfe7a2d39d7843 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 10:32:13 -0700 Subject: [PATCH 16/70] Remove unnecessary dockerfile command. --- db/Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/db/Dockerfile b/db/Dockerfile index b6f4bd731..b9b6f8d9a 100644 --- a/db/Dockerfile +++ b/db/Dockerfile @@ -1,6 +1,5 @@ FROM mariadb:10.4.12 COPY ./start.sh /start.sh -RUN chmod +x /start.sh USER mysql From b83436e5ed65ecb85cff85ef09332c2a5429b84e Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 10:38:32 -0700 Subject: [PATCH 17/70] dojo start is run before the config.env is created, so only include it if it exists. 
--- script/dojo | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/script/dojo b/script/dojo index db585b061..40a013c25 100755 --- a/script/dojo +++ b/script/dojo @@ -16,7 +16,9 @@ DOCKER_ARGS=${DOCKER_ARGS:--i} CONTAINER_WITH_MYSQL=db DOJO_DIR=/opt/pwn.college -. $DOJO_DIR/data/config.env +if [ -f $DOJO_DIR/data/config.env ]; then + . $DOJO_DIR/data/config.env +fi case "$ACTION" in # HELP: update: update dojo files (warning: does `git pull`), rebuild containers, and restart any changed services From 2a62598f06e09ecec2b14ef2e9753959bd5d356c Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 11:10:38 -0700 Subject: [PATCH 18/70] Confirm to the insane mariadb dockerfile --- db/Dockerfile | 2 -- db/start.sh | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/db/Dockerfile b/db/Dockerfile index b9b6f8d9a..bcb5bafd7 100644 --- a/db/Dockerfile +++ b/db/Dockerfile @@ -1,5 +1,3 @@ FROM mariadb:10.4.12 COPY ./start.sh /start.sh -USER mysql - diff --git a/db/start.sh b/db/start.sh index 30324c270..efc41a371 100755 --- a/db/start.sh +++ b/db/start.sh @@ -2,7 +2,7 @@ if [ "${DB_EXTERNAL:-no}" = "no" ] then - mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --wait_timeout=28800 --log-warnings=0 + /docker-entrypoint.sh mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci --wait_timeout=28800 --log-warnings=0 else while true; do sleep 86400; done fi From ed47828c515bb755b0a484d57856fe01f06a5a3e Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 14:42:20 -0700 Subject: [PATCH 19/70] Add two more config variables so that prod docker-compose.yml can be the same as dev. --- docker-compose.yml | 4 ++-- script/container-setup.sh | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 2368aa9cd..9d65f894f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -81,9 +81,9 @@ services: - DISCORD_BOT_TOKEN=${DISCORD_BOT_TOKEN} - DISCORD_GUILD_ID=${DISCORD_GUILD_ID} - INTERNET_FOR_ALL=${INTERNET_FOR_ALL} - - VIRTUAL_HOST=${DOJO_HOST},localhost + - VIRTUAL_HOST=${VIRTUAL_HOST} - VIRTUAL_PORT=8000 - - LETSENCRYPT_HOST=${DOJO_HOST} + - LETSENCRYPT_HOST=${LETSENCRYPT_HOST} volumes: - ./data:/var/data - ./data/CTFd/logs:/var/log/CTFd diff --git a/script/container-setup.sh b/script/container-setup.sh index 9d51ab748..7f774d365 100755 --- a/script/container-setup.sh +++ b/script/container-setup.sh @@ -40,6 +40,8 @@ define DB_NAME ctfd define DB_USER ctfd define DB_PASS ctfd define DB_EXTERNAL no # change to anything but no and the db container will not start mysql +define VIRTUAL_HOST "${DOJO_HOST},localhost" +define LETSENCRYPT_HOST "${DOJO_HOST}" mv $DOJO_DIR/data/.config.env $DOJO_DIR/data/config.env . 
$DOJO_DIR/data/config.env From b82be99a1b92a134f047be3d033436045bead150 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 16:06:58 -0700 Subject: [PATCH 20/70] Define did not work like I thought --- script/container-setup.sh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/script/container-setup.sh b/script/container-setup.sh index 7f774d365..4d8384b5f 100755 --- a/script/container-setup.sh +++ b/script/container-setup.sh @@ -14,7 +14,11 @@ define () { value="${current:-${defined:-$default}}" echo "${name}=${value}" >> $DOJO_DIR/data/.config.env } -define DOJO_HOST localhost.pwn.college +DEFAULT_DOJO_HOST=localhost.pwn.college + +define DOJO_HOST "${DEFAULT_DOJO_HOST}" +define VIRTUAL_HOST "${DEFAULT_DOJO_HOST},localhost" +define LETSENCRYPT_HOST "${DEFAULT_DOJO_HOST}" define DOJO_ENV development define DOJO_CHALLENGE challenge-mini define SECRET_KEY $(openssl rand -hex 16) @@ -40,8 +44,6 @@ define DB_NAME ctfd define DB_USER ctfd define DB_PASS ctfd define DB_EXTERNAL no # change to anything but no and the db container will not start mysql -define VIRTUAL_HOST "${DOJO_HOST},localhost" -define LETSENCRYPT_HOST "${DOJO_HOST}" mv $DOJO_DIR/data/.config.env $DOJO_DIR/data/config.env . $DOJO_DIR/data/config.env From b92d7dabbb58dabee2bec9328dcf4a6cdd049bdc Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 16:51:51 -0700 Subject: [PATCH 21/70] more files to ignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 62ff2770e..5e0942ea2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ data/ *.pyc .DS_Store +opt/ +sensai/ From c050a1666170f64a099a4228d132861db7b3f073 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 29 Mar 2024 22:26:25 -0700 Subject: [PATCH 22/70] Update README.md with the updating sequence. Closes #380. --- README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/README.md b/README.md index e2529c5f8..d83af8db0 100644 --- a/README.md +++ b/README.md @@ -69,6 +69,30 @@ The following options are available: - `challenge-mini`: Adds a minified desktop (the default). - `challenge-full`: The full (70+ GB) setup. +## Updating + +When updating your dojo deployment, there is only one supported method in the `dojo` directory: + +```sh +docker kill pwncollege/dojo +docker rm pwncollege/dojo +git pull +docker build -t pwncollege/dojo "$DOJO_PATH" +docker run --privileged -d -v "${DOJO_PATH}:/opt/pwn.college:shared" -p 22:22 -p 80:80 -p 443:443 --name dojo pwncollege/dojo +``` + +This will cause downtime when the dojo is rebuilding. + +Some changes _can_ be applied without a complete restart, however this is not guaranteed. + +If you really know what you're doing (the changes that you're pulling in are just to `ctfd`), inside the `pwncollege/dojo` container you can do the following: + +```sh +dojo update +``` + +Note that `dojo update` is not guaranteed to be successful and should only be used if you fully understand each commit/change that you are updating. + ## Customization _All_ dojo data will be stored in the `./data` directory. 
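The update flow documented in the README hunk above is a full rebuild-and-restart. A minimal sketch of that same sequence wrapped into one script, assuming the checkout lives at `$DOJO_PATH` and the container is named `dojo` (as in the README's `docker run` line); the default path below is a hypothetical placeholder, and the script is illustrative rather than part of this patch series:

```sh
#!/bin/sh -e
# Illustrative full-restart update, following the README sequence above.
DOJO_PATH="${DOJO_PATH:-/opt/dojo-src}"   # hypothetical default; point this at your checkout

docker kill dojo 2>/dev/null || true      # stop the running dojo container, if any
docker rm dojo 2>/dev/null || true
git -C "$DOJO_PATH" pull                  # warning: pulls whatever the tracked branch has
docker build -t pwncollege/dojo "$DOJO_PATH"
docker run --privileged -d \
    -v "${DOJO_PATH}:/opt/pwn.college:shared" \
    -p 22:22 -p 80:80 -p 443:443 \
    --name dojo pwncollege/dojo
```

One detail worth noting: the README snippet kills and removes `pwncollege/dojo`, which is the image tag, while the container itself is created with `--name dojo`; the sketch above targets the container name so the kill/remove pair actually hits the running instance.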
From 80ec63dffcf84c8096d89a582953fbc510cdf60c Mon Sep 17 00:00:00 2001 From: Yan Date: Sat, 30 Mar 2024 00:23:36 -0700 Subject: [PATCH 23/70] i must have forgotten to commit this --- don't redownload LFS files on update --- dojo_plugin/utils/dojo.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index 47ef71a70..8111bbe89 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -186,6 +186,8 @@ def dojo_initialize_files(data, dojo_dir): abs_path = dojo_dir / rel_path assert not abs_path.is_symlink(), f"{rel_path} is a symbolic link!" if dojo_file["type"] == "download": + if abs_path.exists(): + continue abs_path.parent.mkdir(parents=True, exist_ok=True) urllib.request.urlretrieve(dojo_file["url"], str(abs_path)) assert abs_path.stat().st_size >= 50*1024*1024, f"{rel_path} is small enough to fit into git ({abs_path.stat().st_size} bytes) --- put it in the repository!" From f083faf022852ad8e7201b85140937b586f9384a Mon Sep 17 00:00:00 2001 From: Yan Date: Sat, 30 Mar 2024 00:25:56 -0700 Subject: [PATCH 24/70] move is_admin check to only hit when downloading new files --- dojo_plugin/utils/dojo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index 8111bbe89..0ec51003e 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -181,13 +181,13 @@ def load_dojo_subyamls(data, dojo_dir): def dojo_initialize_files(data, dojo_dir): for dojo_file in data.get("files", []): - assert is_admin(), f"LFS support requires admin privileges" rel_path = dojo_dir / dojo_file["path"] abs_path = dojo_dir / rel_path assert not abs_path.is_symlink(), f"{rel_path} is a symbolic link!" if dojo_file["type"] == "download": if abs_path.exists(): continue + assert is_admin(), f"LFS download support requires admin privileges" abs_path.parent.mkdir(parents=True, exist_ok=True) urllib.request.urlretrieve(dojo_file["url"], str(abs_path)) assert abs_path.stat().st_size >= 50*1024*1024, f"{rel_path} is small enough to fit into git ({abs_path.stat().st_size} bytes) --- put it in the repository!" 
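Patches 23 and 24 together mean that a dojo `files` entry of type `download` is only fetched when the target file is missing, and only that fresh download requires platform-admin rights (plus the at-least-50 MiB size check). A minimal sketch of pre-seeding such a file by hand so the loader takes the existing-file path on the next update; the dojo directory, relative path, and URL below are illustrative placeholders rather than values from any real dojo:

```sh
# Illustrative pre-seeding of a large dojo file (all paths and the URL are placeholders).
DOJO_DIR=/opt/pwn.college/data/dojos/example-dojo   # assumed on-disk location of the dojo checkout
mkdir -p "$DOJO_DIR/files"
curl -fL -o "$DOJO_DIR/files/big-image.img" "https://example.com/big-image.img"
# dojo_initialize_files() will now hit the `abs_path.exists()` early continue for this
# entry instead of the admin-only urlretrieve() download.
```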
From 2641371cc8efd18a3c3c1a5ac0ad51e11a0f1710 Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Thu, 28 Mar 2024 21:46:22 -0700 Subject: [PATCH 25/70] Linux VM - Support challenge defined bzImage and vmlinux --- challenge/vm/vm | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/challenge/vm/vm b/challenge/vm/vm index 1f2ebaf0e..f0a622b24 100755 --- a/challenge/vm/vm +++ b/challenge/vm/vm @@ -12,6 +12,7 @@ import tempfile import textwrap import time +from glob import glob LINUX_REBOOT_CMD_POWER_OFF = 0x4321fedc LINUX_REBOOT_CMD_RESTART = 0x1234567 @@ -87,11 +88,14 @@ def execve(argv): def start(): flags = " ".join(flag for flag in extra_boot_flags()) + custom_bz = glob("/challenge/bzImage*") + bzImage = custom_bz[0] if custom_bz else "/opt/linux/bzImage" + kvm = os.path.exists("/dev/kvm") cpu = "host" if kvm else "qemu64" qemu_argv = [ "/usr/bin/qemu-system-x86_64", - "-kernel", "/opt/linux/bzImage", + "-kernel", bzImage, "-cpu", f"{cpu},smep,smap", "-fsdev", "local,id=rootfs,path=/,security_model=passthrough", "-device", "virtio-9p-pci,fsdev=rootfs,mount_tag=/dev/root", @@ -186,10 +190,13 @@ def debug(): except ConnectionRefusedError: error("Error: could not connect to debug") + custom_vmlinux = glob("/challenge/vmlinux*") + vmlinux = custom_vmlinux[0] if custom_vmlinux else "/opt/linux/vmlinux" + execve([ "/usr/bin/gdb", "--ex", "target remote localhost:1234", - "/opt/linux/vmlinux", + vmlinux, ]) From e4d99dcbc294c0f9e9fb1e8e01e10e468aaea1ae Mon Sep 17 00:00:00 2001 From: Yan Date: Sat, 30 Mar 2024 14:07:29 -0700 Subject: [PATCH 26/70] adopt connor's suggestions --- challenge/vm/vm | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/challenge/vm/vm b/challenge/vm/vm index f0a622b24..2a5040c52 100755 --- a/challenge/vm/vm +++ b/challenge/vm/vm @@ -12,8 +12,6 @@ import tempfile import textwrap import time -from glob import glob - LINUX_REBOOT_CMD_POWER_OFF = 0x4321fedc LINUX_REBOOT_CMD_RESTART = 0x1234567 @@ -88,8 +86,7 @@ def execve(argv): def start(): flags = " ".join(flag for flag in extra_boot_flags()) - custom_bz = glob("/challenge/bzImage*") - bzImage = custom_bz[0] if custom_bz else "/opt/linux/bzImage" + bzImage = "/challenge/bzImage" if os.path.exists("/challenge/bzImage") else "/opt/linux/bzImage" kvm = os.path.exists("/dev/kvm") cpu = "host" if kvm else "qemu64" @@ -190,8 +187,7 @@ def debug(): except ConnectionRefusedError: error("Error: could not connect to debug") - custom_vmlinux = glob("/challenge/vmlinux*") - vmlinux = custom_vmlinux[0] if custom_vmlinux else "/opt/linux/vmlinux" + vmlinux = "/challenge/vmlinux" if os.path.exists("/challenge/vmlinux") else "/opt/linux/vmlinux" execve([ "/usr/bin/gdb", From 2aad3a57844cd0f6527f49b85d7c42db22765d59 Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Sat, 30 Mar 2024 23:31:32 -0700 Subject: [PATCH 27/70] challege vm - add .panic_on_oops --- challenge/vm/vm | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/challenge/vm/vm b/challenge/vm/vm index 2a5040c52..a72610521 100755 --- a/challenge/vm/vm +++ b/challenge/vm/vm @@ -67,6 +67,10 @@ def extra_boot_flags(): if os.path.exists("/challenge/.nopti"): nopti = True + panic_on_oops = False + if os.path.exists("/challenge/.panic_on_oops"): + panic_on_oops = True + result = [] if nokaslr: result.append("nokaslr") @@ -74,6 +78,10 @@ def extra_boot_flags(): if nopti: result.append("nopti") + if panic_on_oops: + result.append("oops=panic") + result.append("panic_on_warn=1") + return result From 
03f11a0d9d25f9763d3aa0c2c788ee72b41e2bbd Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Sun, 31 Mar 2024 21:52:47 -0700 Subject: [PATCH 28/70] Implementation of daily backups to cloud storage. (#219) Implementation of daily backups to cloud storage. Closes #207. --- Dockerfile | 8 +++++++- README.md | 6 ++++++ .../system/pwn.college.cloud.backup.service | 7 +++++++ .../system/pwn.college.cloud.backup.timer | 8 ++++++++ script/container-setup.sh | 11 ++++++++++ script/dojo | 20 +++++++++++++++++++ 6 files changed, 59 insertions(+), 1 deletion(-) create mode 100644 etc/systemd/system/pwn.college.cloud.backup.service create mode 100644 etc/systemd/system/pwn.college.cloud.backup.timer diff --git a/Dockerfile b/Dockerfile index e09dfd853..c68284bca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,8 @@ RUN apt-get update && \ iputils-ping \ host \ htop \ - zfsutils-linux + zfsutils-linux \ + unzip RUN curl -fsSL https://get.docker.com | /bin/sh RUN echo '{ "data-root": "/opt/pwn.college/data/docker", "builder": {"Entitlements": {"security-insecure": true}} }' > /etc/docker/daemon.json @@ -22,6 +23,9 @@ RUN echo '{ "data-root": "/opt/pwn.college/data/docker", "builder": {"Entitlemen # TODO: this can be removed with docker-v22 (buildx will be default) RUN docker buildx install +# install aws cli (for cloud backups) +RUN cd /tmp && curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && unzip awscliv2.zip && ./aws/install && rm -rf awscliv2.zip ./aws + RUN git clone --branch 3.6.0 https://github.com/CTFd/CTFd /opt/CTFd RUN wget -O /etc/docker/seccomp.json https://raw.githubusercontent.com/moby/moby/master/profiles/seccomp/default.json @@ -32,6 +36,8 @@ RUN ln -s /opt/pwn.college/etc/systemd/system/pwn.college.backup.service /etc/sy RUN ln -s /opt/pwn.college/etc/systemd/system/pwn.college.backup.timer /etc/systemd/system/pwn.college.backup.timer RUN ln -s /opt/pwn.college/etc/systemd/system/pwn.college.cachewarmer.service /etc/systemd/system/pwn.college.cachewarmer.service RUN ln -s /opt/pwn.college/etc/systemd/system/pwn.college.cachewarmer.timer /etc/systemd/system/pwn.college.cachewarmer.timer +RUN ln -s /opt/pwn.college/etc/systemd/system/pwn.college.cloud.backup.service /etc/systemd/system/pwn.college.cloud.backup.service +RUN ln -s /opt/pwn.college/etc/systemd/system/pwn.college.cloud.backup.timer /etc/systemd/system/pwn.college.cloud.backup.timer RUN ln -s /etc/systemd/system/pwn.college.service /etc/systemd/system/multi-user.target.wants/pwn.college.service RUN ln -s /etc/systemd/system/pwn.college.logging.service /etc/systemd/system/multi-user.target.wants/pwn.college.logging.service RUN ln -s /etc/systemd/system/pwn.college.backup.timer /etc/systemd/system/timers.target.wants/pwn.college.backup.timer diff --git a/README.md b/README.md index d83af8db0..4dc322e26 100644 --- a/README.md +++ b/README.md @@ -100,6 +100,12 @@ _All_ dojo data will be stored in the `./data` directory. Once logged in, you can add a dojo by visiting `/dojos/create`. Dojos are contained within git repositories. Refer to [the example dojo](https://github.com/pwncollege/example-dojo) for more information. +## Cloud Backups + +If configured properly, the dojo will store the hourly database backups into an S3 bucket of your choosing. + +TODO ADD MORE HERE + ## Contributing We love Pull Requests! 
🌟 diff --git a/etc/systemd/system/pwn.college.cloud.backup.service b/etc/systemd/system/pwn.college.cloud.backup.service new file mode 100644 index 000000000..a28e29870 --- /dev/null +++ b/etc/systemd/system/pwn.college.cloud.backup.service @@ -0,0 +1,7 @@ +[Unit] +Description=Upload a pwn.college backup to cloud (at this time just an S3 bucket). + +[Service] +Type=simple +ExecStart=dojo cloud-backup +ExecCondition=/bin/sh -c ". /opt/pwn.college/data/config.env; [ ! -z ${BACKUP_AES_KEY_FILE+x} ]" diff --git a/etc/systemd/system/pwn.college.cloud.backup.timer b/etc/systemd/system/pwn.college.cloud.backup.timer new file mode 100644 index 000000000..1fb6dd8f1 --- /dev/null +++ b/etc/systemd/system/pwn.college.cloud.backup.timer @@ -0,0 +1,8 @@ +[Unit] +Description=Timer to run pwn.college cloud backup service + +[Timer] +OnCalendar=daily + +[Install] +WantedBy=timers.target diff --git a/script/container-setup.sh b/script/container-setup.sh index 4d8384b5f..fa2eca2c3 100755 --- a/script/container-setup.sh +++ b/script/container-setup.sh @@ -44,6 +44,11 @@ define DB_NAME ctfd define DB_USER ctfd define DB_PASS ctfd define DB_EXTERNAL no # change to anything but no and the db container will not start mysql +define BACKUP_AES_KEY_FILE +define S3_BACKUP_BUCKET +define AWS_DEFAULT_REGION +define AWS_ACCESS_KEY_ID +define AWS_SECRET_ACCESS_KEY mv $DOJO_DIR/data/.config.env $DOJO_DIR/data/config.env . $DOJO_DIR/data/config.env @@ -63,6 +68,12 @@ if [ ! -f $DOJO_DIR/data/homes/homefs ]; then rm -rf $DOJO_DIR/data/homes/homefs_mount fi +# Create the AES key file if it does not exist +if [ ! -z ${BACKUP_AES_KEY_FILE+x} ] && [ ! -f ${BACKUP_AES_KEY_FILE} ] +then + openssl rand 214 > "${BACKUP_AES_KEY_FILE}" +fi + echo "[+] Creating loopback devices for home mounts. This might take a while." for i in $(seq 1 4096); do if [ -e /dev/loop$i ]; then diff --git a/script/dojo b/script/dojo index 40a013c25..e9ccd7a63 100755 --- a/script/dojo +++ b/script/dojo @@ -74,6 +74,26 @@ case "$ACTION" in docker exec ${CONTAINER_WITH_MYSQL} mysqldump -h ${DB_HOST} -p${DB_PASS} -u${DB_USER} --single-transaction --routines --triggers ${DB_NAME} | gzip > "data/backups/db-$(date -Iseconds).sql.gz" ;; + # HELP: cloud-backup: upload the last day's worth of cloud backups to S3, but encrypt it at rest + "cloud-backup") + [ -z ${BACKUP_AES_KEY_FILE+x} ] && echo "To use cloud backup, BACKUP_AES_KEY_FILE must be set"; exit -1 + [ -f $BACKUP_AES_KEY_FILE ] || echo "To use cloud backup, BACKUP_AES_KEY_FILE must be set to a file, but is $BACKUP_AES_KEY_FILE instead"; exit -1 + [ -z ${S3_BACKUP_BUCKET+x} ] && echo "To use cloud backup, S3_BACKUP_BUCKET must be set to the S3 bucket to use"; exit -1 + set -e + for f in $(find data/backups -mtime -1 -type f) + do + BACKUP_FILENAME="$(basename $f).enc" + OUT_FILE="/tmp/$BACKUP_FILENAME" + openssl enc -in "$f" -out "$OUT_FILE" -e -aes256 -pbkdf2 -kfile "$BACKUP_AES_KEY_FILE" + echo "created encrypted $OUT_FILE" + + aws s3 cp "$OUT_FILE" "s3://$S3_BACKUP_BUCKET/$BACKUP_FILENAME" + echo "backed up $OUT_FILE to S3 bucket $S3_BACKUP_BUCKET" + rm "$OUT_FILE" + done + ;; + + # HELP: restore PATH: restores a dojo db backup. 
Path arg is relative to the `data/backups` directory "restore") BACKUP_PATH="data/backups/$1" From 32a943f4968f18d5dcdfeabb5a8e245524bbcd63 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Sun, 31 Mar 2024 22:29:53 -0700 Subject: [PATCH 29/70] Fix the cloud backup functionality --- script/dojo | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/script/dojo b/script/dojo index e9ccd7a63..2d8df56e3 100755 --- a/script/dojo +++ b/script/dojo @@ -76,9 +76,9 @@ case "$ACTION" in # HELP: cloud-backup: upload the last day's worth of cloud backups to S3, but encrypt it at rest "cloud-backup") - [ -z ${BACKUP_AES_KEY_FILE+x} ] && echo "To use cloud backup, BACKUP_AES_KEY_FILE must be set"; exit -1 - [ -f $BACKUP_AES_KEY_FILE ] || echo "To use cloud backup, BACKUP_AES_KEY_FILE must be set to a file, but is $BACKUP_AES_KEY_FILE instead"; exit -1 - [ -z ${S3_BACKUP_BUCKET+x} ] && echo "To use cloud backup, S3_BACKUP_BUCKET must be set to the S3 bucket to use"; exit -1 + [ -z ${BACKUP_AES_KEY_FILE+x} ] && (echo "To use cloud backup, BACKUP_AES_KEY_FILE must be set"; exit -1) + [ -f $BACKUP_AES_KEY_FILE ] || (echo "To use cloud backup, BACKUP_AES_KEY_FILE must be set to a file, but is $BACKUP_AES_KEY_FILE instead"; exit -1) + [ -z ${S3_BACKUP_BUCKET+x} ] && (echo "To use cloud backup, S3_BACKUP_BUCKET must be set to the S3 bucket to use"; exit -1) set -e for f in $(find data/backups -mtime -1 -type f) do @@ -87,13 +87,12 @@ case "$ACTION" in openssl enc -in "$f" -out "$OUT_FILE" -e -aes256 -pbkdf2 -kfile "$BACKUP_AES_KEY_FILE" echo "created encrypted $OUT_FILE" - aws s3 cp "$OUT_FILE" "s3://$S3_BACKUP_BUCKET/$BACKUP_FILENAME" + AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION} AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} aws s3 cp "$OUT_FILE" "s3://$S3_BACKUP_BUCKET/$BACKUP_FILENAME" echo "backed up $OUT_FILE to S3 bucket $S3_BACKUP_BUCKET" rm "$OUT_FILE" done ;; - # HELP: restore PATH: restores a dojo db backup. 
Path arg is relative to the `data/backups` directory "restore") BACKUP_PATH="data/backups/$1" From 0f8b15667ce9d3d90b5429f5887f8daf093730ce Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Mon, 1 Apr 2024 22:17:01 -0700 Subject: [PATCH 30/70] Workspace VM: Use only 1 core if no KVM --- challenge/vm/vm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/challenge/vm/vm b/challenge/vm/vm index a72610521..2e97e1dde 100755 --- a/challenge/vm/vm +++ b/challenge/vm/vm @@ -109,7 +109,7 @@ def start(): "-device", "e1000,netdev=net0", "-netdev", "user,id=net0,hostfwd=tcp::22-:22", "-m", "2G", - "-smp", "2", + "-smp", "2" if kvm else "1", "-nographic", "-monitor", "none", "-append", f"rw rootfstype=9p rootflags=trans=virtio console=ttyS0 init=/opt/pwn.college/vm/init {flags}", From 7d674f43de42dce9e027efd5f904a9499ebfffb0 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Wed, 3 Apr 2024 16:07:40 +0000 Subject: [PATCH 31/70] Perf: Improve speed fetching belts and awards --- dojo_plugin/utils/awards.py | 88 ++++++++++++++++++++++--------------- 1 file changed, 52 insertions(+), 36 deletions(-) diff --git a/dojo_plugin/utils/awards.py b/dojo_plugin/utils/awards.py index 18d0cba48..ee5e75bd3 100644 --- a/dojo_plugin/utils/awards.py +++ b/dojo_plugin/utils/awards.py @@ -1,7 +1,7 @@ import datetime from CTFd.cache import cache -from CTFd.models import db +from CTFd.models import db, Users from flask import url_for from .discord import get_discord_roles, get_discord_user, add_role, send_message @@ -31,32 +31,63 @@ def get_user_emojis(user): return emojis def get_belts(): - result = { - "dates": {}, - "users": {}, - "ranks": {}, - } - + result = dict(dates={}, users={}, ranks={}) for color in reversed(BELT_ORDER): result["dates"][color] = {} result["ranks"][color] = [] - for belt in Belts.query.filter_by(name=color).order_by(Belts.date): - if belt.user.hidden: - continue - - result["dates"][color][belt.user.id] = str(belt.date) - if belt.user.id in result["users"]: - continue + belts = ( + Belts.query + .join(Users) + .filter(Belts.name.in_(BELT_ORDER), ~Users.hidden, Belts.date.isnot(None)) # TODO: Date should never be null + .with_entities( + Belts.date, + Belts.name.label("color"), + Users.id.label("user_id"), + Users.name.label("handle"), + Users.website.label("site"), + ) + ).all() + belts.sort(key=lambda belt: (-BELT_ORDER.index(belt.color), belt.date)) + + for belt in belts: + result["dates"][belt.color][belt.user_id] = str(belt.date) + if belt.user_id not in result["users"]: + result["users"][belt.user_id] = dict( + handle=belt.handle, + site=belt.site, + color=belt.color, + date=str(belt.date) + ) + result["ranks"][color].append(belt.user_id) - result["ranks"][color].append(belt.user.id) - result["users"][belt.user.id] = { - "handle": belt.user.name, - "site": belt.user.website, - "color": color, - "date": str(belt.date), - } + return result +def get_viewable_emojis(user): + result = { } + viewable_dojo_urls = { + dojo.hex_dojo_id: url_for("pwncollege_dojo.listing", dojo=dojo.reference_id) + for dojo in Dojos.viewable(user=user).where(Dojos.data["type"] != "example") + } + emojis = ( + Emojis.query + .join(Users) + .filter(~Users.hidden, Emojis.category.in_((*viewable_dojo_urls.keys(), None))) + .order_by(Emojis.date) + .with_entities( + Emojis.name, + Emojis.description, + Emojis.category, + Users.id.label("user_id"), + ) + ) + for emoji in emojis: + result.setdefault(emoji.user_id, []).append({ + "text": emoji.description, + "emoji": emoji.name, + "count": 1, + "url": 
viewable_dojo_urls.get(emoji.category, "#"), + }) return result def update_awards(user): @@ -94,18 +125,3 @@ def update_awards(user): continue db.session.add(Emojis(user=user, name=emoji, description=f"Awarded for completing the {dojo_name} dojo.", category=dojo_id)) db.session.commit() - -def get_viewable_emojis(user): - viewable_dojos = { dojo.hex_dojo_id:dojo for dojo in Dojos.viewable(user=user).where(Dojos.data["type"] != "example") } - emojis = { } - for emoji in Emojis.query.order_by(Emojis.date).all(): - if emoji.category and emoji.category not in viewable_dojos: - continue - - emojis.setdefault(emoji.user.id, []).append({ - "text": emoji.description, - "emoji": emoji.name, - "count": 1, - "url": url_for("pwncollege_dojo.listing", dojo=viewable_dojos[emoji.category].reference_id) if emoji.category else "#" - }) - return emojis From 6cc644907b71218140ea8857b58c449be4e094de Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Wed, 3 Apr 2024 22:16:16 +0000 Subject: [PATCH 32/70] Belts: Fix typo --- dojo_plugin/utils/awards.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo_plugin/utils/awards.py b/dojo_plugin/utils/awards.py index ee5e75bd3..31ba355b8 100644 --- a/dojo_plugin/utils/awards.py +++ b/dojo_plugin/utils/awards.py @@ -59,7 +59,7 @@ def get_belts(): color=belt.color, date=str(belt.date) ) - result["ranks"][color].append(belt.user_id) + result["ranks"][belt.color].append(belt.user_id) return result From 661322679a27b3d43cbf2596395615d6339ed8b6 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Wed, 3 Apr 2024 22:45:27 +0000 Subject: [PATCH 33/70] Infrastructure: Log nginx requests as json --- docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docker-compose.yml b/docker-compose.yml index 9d65f894f..e1faadbf2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -179,6 +179,7 @@ services: environment: - DOCKER_HOST=unix:///tmp/${DOCKER_PSLR}/docker.sock - DEFAULT_HOST=${DOJO_HOST} + - LOG_JSON=true volumes: - conf:/etc/nginx/conf.d - html:/usr/share/nginx/html From 06aefc486c7bb31e901f6405fb5a077f2b5e929c Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Thu, 4 Apr 2024 18:45:14 -0700 Subject: [PATCH 34/70] Add libslub and kropr --- challenge/Dockerfile | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index fec17aac1..81d382b83 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -82,6 +82,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ autoconf bc bison + cargo clang cmake cpio @@ -126,10 +127,16 @@ EOF ################################################################################ -FROM builder as builder-dojjail +FROM builder as builder-github-tools RUN < Date: Fri, 5 Apr 2024 17:52:06 +0000 Subject: [PATCH 35/70] Perf: Improve speed of `/dojos` endpoint --- dojo_plugin/pages/dojos.py | 37 +++++++++------ dojo_theme/templates/dojos.html | 81 +++++++++++++++++---------------- 2 files changed, 65 insertions(+), 53 deletions(-) diff --git a/dojo_plugin/pages/dojos.py b/dojo_plugin/pages/dojos.py index f0b640ed3..9d881c348 100644 --- a/dojo_plugin/pages/dojos.py +++ b/dojo_plugin/pages/dojos.py @@ -29,27 +29,34 @@ def dojo_stats(dojo): @dojos.route("/dojos") def listing(): user = get_current_user() - typed_dojos = { + categorized_dojos = { "Start Here": [], "Topics": [], "Courses": [], "More Material": [], } - for dojo in Dojos.viewable(user=user): - if dojo.type == "topic": - typed_dojos["Topics"].append(dojo) - elif 
dojo.type == "course": - typed_dojos["Courses"].append(dojo) - elif dojo.type == "hidden": - continue - elif dojo.type == "example" and dojo.official: + type_to_category = { + "topic": "Topics", + "course": "Courses", + "welcome": "Start Here" + } + options = db.undefer(Dojos.modules_count), db.undefer(Dojos.challenges_count) + dojo_solves = Dojos.viewable(user=user).options(*options) + if user: + solves_subquery = (DojoChallenges.solves(user=user, ignore_visibility=True, ignore_admins=False) + .group_by(DojoChallenges.dojo_id) + .with_entities(DojoChallenges.dojo_id, db.func.count().label("solve_count")) + .subquery()) + dojo_solves = (dojo_solves.outerjoin(solves_subquery, Dojos.dojo_id == solves_subquery.c.dojo_id) + .add_columns(db.func.coalesce(solves_subquery.c.solve_count, 0).label("solve_count"))) + else: + dojo_solves = dojo_solves.add_columns(0) + for dojo, solves in dojo_solves: + if dojo.type == "hidden" or (dojo.type == "example" and dojo.official): continue - elif dojo.type == "welcome": - typed_dojos["Start Here"].append(dojo) - else: - typed_dojos["More Material"].append(dojo) - - return render_template("dojos.html", user=user, typed_dojos=typed_dojos) + category = type_to_category.get(dojo.type, "More Material") + categorized_dojos[category].append((dojo, solves)) + return render_template("dojos.html", user=user, categorized_dojos=categorized_dojos) @dojos.route("/dojos/create") diff --git a/dojo_theme/templates/dojos.html b/dojo_theme/templates/dojos.html index 580b8be36..51899f3d4 100644 --- a/dojo_theme/templates/dojos.html +++ b/dojo_theme/templates/dojos.html @@ -1,52 +1,57 @@ {% extends "base.html" %} {% from "macros/widgets.html" import card %} +{% set dojo_descriptions = { + "Start Here": "These dojos are designed to help you begin your pwn.college journey. Start here before venturing onwards!", + "Topics": "These dojos form the official pwn.college curriculum, and you will earn belts when you complete them. We recommend that you tackle them in order.", + "Courses": "We run a number of courses on this platform. For the most part, these courses import the above material, though some might introduce new concepts and challenges.", + "More Material": "This section contains dojos created by the pwn.college community. Completing these dojos will grant you emoji badges!" +} %} + +{% set svg_add_icon %} + + + + + + + +{% endset %} + {% block content %}

Dojos

-

-        The material on pwn.college is split into a number of "dojos", with each dojo typically covering a high-level topic.
-        These dojos are below.
-        Enter them when you are ready.

+

The material on pwn.college is split into a number of "dojos", with each dojo typically covering a high-level topic. These dojos are below. Enter them when you are ready.

-
- {% for type, dojos in typed_dojos.items() %} -

{{ type | title }}

- {% if type == "Start Here" %} -

These dojos are designed to help you begin your pwn.college journey. Start here before venturing onwards!

- {% elif type == "Topics" %} -

These dojos form the official pwn.college curriculum, and you will earn belts when you complete them. We recommend that you tackle them in order. Good luck!

- {% elif type == "Courses" %} -

We run a number of courses on this platform. For the most part, these courses import the above material, though some might introduce new concepts and challenges.

- {% elif type.startswith("More") %} -

This section contains dojos created by the pwn.college community. Completing these dojos will grant you emoji badges!

- {% endif %} -
    - {% for dojo in dojos %} - {{ card(url_for("pwncollege_dojos.view_dojo", dojo=dojo.reference_id), - title=dojo.name or dojo.id, - text="{} Modules : ".format(dojo.modules | length) + "{} / {}".format(dojo.solves(user=user, ignore_visibility=True, ignore_admins=False).count() if user else 0, dojo.challenges | length), - icon="/themes/dojo_theme/static/img/dojo/{}.svg".format(dojo.award.belt) if (dojo.award.belt and dojo.official) else None, - emoji=dojo.award.emoji ) }} - {% endfor %} - {% if type.startswith("More") %} - {% call card(url_for("pwncollege_dojos.dojo_create"), custom=True) %} - - - - - - - - {% endcall %} - {% endif %} -
-
+
+ {% for category, dojos in categorized_dojos.items() %} +
+

{{ category }}

+

{{ dojo_descriptions[category] }}

+
+            {% for dojo, solves in dojos %}
+            {% set text = "{} Modules : {} / {}".format(dojo.modules_count, solves, dojo.challenges_count) %}
+            {% set icon = "/themes/dojo_theme/static/img/dojo/{}.svg".format(dojo.award.belt) if (dojo.award.belt and dojo.official) else None %}
+            {{ card(
+                url_for("pwncollege_dojos.view_dojo", dojo=dojo.reference_id),
+                title=dojo.name or dojo.id,
+                text=text,
+                icon=icon,
+                emoji=dojo.award.emoji,
+            ) }}
+            {% endfor %}
+            {% if category == "More Material" %}
+            {% call card(url_for("pwncollege_dojos.dojo_create"), custom=True) %}
+            {{ svg_add_icon }}
+            {% endcall %}
+            {% endif %}
+
{% endfor %} +
{% endblock %} {% block scripts %} From f25d8ce06de517ba0abc3d021875c1204073fedf Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Fri, 5 Apr 2024 17:56:14 +0000 Subject: [PATCH 36/70] Perf: Add fast module and challenge counts for dojos --- dojo_plugin/models/__init__.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/dojo_plugin/models/__init__.py b/dojo_plugin/models/__init__.py index df32d3075..4c0a1e527 100644 --- a/dojo_plugin/models/__init__.py +++ b/dojo_plugin/models/__init__.py @@ -38,6 +38,14 @@ def wrapper(self, value): return decorator +deferred_definitions = [] +def deferred_definition(func): + deferred_definitions.append( + lambda: setattr(func.__globals__[func.__qualname__.split(".")[0]], + func.__name__, + func())) + + def columns_repr(column_names): def __repr__(self): description = " ".join(f"{name}={getattr(self, name)!r}" for name in column_names) @@ -155,6 +163,22 @@ def modules(self, value): challenge.module_index = module_index self._modules = value + @deferred_definition + def modules_count(): + return db.column_property( + db.select([db.func.count()]) + .where(Dojos.dojo_id == DojoModules.dojo_id) + .scalar_subquery(), + deferred=True) + + @deferred_definition + def challenges_count(): + return db.column_property( + db.select([db.func.count()]) + .where(Dojos.dojo_id == DojoChallenges.dojo_id) + .scalar_subquery(), + deferred=True) + @property def path(self): if hasattr(self, "_path"): @@ -681,3 +705,8 @@ class Belts(Awards): class Emojis(Awards): __mapper_args__ = {"polymorphic_identity": "emoji"} + + +for deferral in deferred_definitions: + deferral() +del deferred_definitions \ No newline at end of file From 1a940b51b371d2ec3f355dc5fe949a65e07168e7 Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 5 Apr 2024 18:20:44 -0700 Subject: [PATCH 37/70] Add nighly build (where nightly is defined by @adamdoupe as the typical nightime of the pwn.college team). 
Closes #278 --- .github/workflows/test.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8de49f672..f53182744 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,5 +1,9 @@ name: Test building and running dojo -on: [push, pull_request] +on: + push: + pull_request: + schedule: + - cron: '42 06 * * *' jobs: smoketest: runs-on: ubuntu-22.04 @@ -11,6 +15,7 @@ jobs: with: tags: dojo-test load: true + no-cache: event.github.event.schedule # cache-from: type=gha # cache-to: type=gha,mode=max From 8430b6efc7242c403eab05cb0559ca0492a2f87f Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 5 Apr 2024 18:27:00 -0700 Subject: [PATCH 38/70] workflow syntax --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f53182744..ae55ef610 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -15,7 +15,7 @@ jobs: with: tags: dojo-test load: true - no-cache: event.github.event.schedule + no-cache: ${{ event.github.event.schedule }} # cache-from: type=gha # cache-to: type=gha,mode=max From 5ee55f65260904ac23089941eb0567c68a22bd8b Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 5 Apr 2024 18:32:04 -0700 Subject: [PATCH 39/70] Another try --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ae55ef610..2c7585381 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -15,7 +15,7 @@ jobs: with: tags: dojo-test load: true - no-cache: ${{ event.github.event.schedule }} + no-cache: ${{ github.event_name == 'schedule' }} # cache-from: type=gha # cache-to: type=gha,mode=max From b43b6bba127e02c6616d05c545eec9850d53b87d Mon Sep 17 00:00:00 2001 From: Adam Doupe Date: Fri, 5 Apr 2024 19:37:29 -0700 Subject: [PATCH 40/70] Fix the cloud backup service. --- Dockerfile | 1 + etc/systemd/system/pwn.college.cloud.backup.service | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index c68284bca..be274537b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -42,6 +42,7 @@ RUN ln -s /etc/systemd/system/pwn.college.service /etc/systemd/system/multi-user RUN ln -s /etc/systemd/system/pwn.college.logging.service /etc/systemd/system/multi-user.target.wants/pwn.college.logging.service RUN ln -s /etc/systemd/system/pwn.college.backup.timer /etc/systemd/system/timers.target.wants/pwn.college.backup.timer RUN ln -s /etc/systemd/system/pwn.college.cachewarmer.timer /etc/systemd/system/timers.target.wants/pwn.college.cachewarmer.timer +RUN ln -s /etc/systemd/system/pwn.college.cloud.backup.timer /etc/systemd/system/timers.target.wants/pwn.college.cloud.backup.timer RUN mkdir -p /opt/pwn.college ADD . /opt/pwn.college diff --git a/etc/systemd/system/pwn.college.cloud.backup.service b/etc/systemd/system/pwn.college.cloud.backup.service index a28e29870..1fee9493d 100644 --- a/etc/systemd/system/pwn.college.cloud.backup.service +++ b/etc/systemd/system/pwn.college.cloud.backup.service @@ -4,4 +4,4 @@ Description=Upload a pwn.college backup to cloud (at this time just an S3 bucket [Service] Type=simple ExecStart=dojo cloud-backup -ExecCondition=/bin/sh -c ". /opt/pwn.college/data/config.env; [ ! -z ${BACKUP_AES_KEY_FILE+x} ]" +ExecCondition=:/bin/sh -c '. /opt/pwn.college/data/config.env; [ ! 
-z ${BACKUP_AES_KEY_FILE+x} ]' From 4da384eea920ddf852016436673c96fcd37881a0 Mon Sep 17 00:00:00 2001 From: Yan Shoshitaishvili Date: Sat, 6 Apr 2024 00:18:16 -0700 Subject: [PATCH 41/70] Nicer local testing (#383) * helper script for local testing * fixes * minor updates * fixes * document * move into test dir --- README.md | 3 +- test/local-tester.sh | 70 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+), 2 deletions(-) create mode 100755 test/local-tester.sh diff --git a/README.md b/README.md index 4dc322e26..02a5b850d 100644 --- a/README.md +++ b/README.md @@ -114,5 +114,4 @@ Send a PR so everyone can benefit. For more substantial changes, open an issue to ensure we're on the same page. Together, we make this project better for all! 🚀 -You can run the dojo CI testcases locally using [act](https://github.com/nektos/act). -They should run using the "medium" image. +You can run the dojo CI testcases locally using `test/local-tester.sh`. diff --git a/test/local-tester.sh b/test/local-tester.sh new file mode 100755 index 000000000..e875de47a --- /dev/null +++ b/test/local-tester.sh @@ -0,0 +1,70 @@ +#!/bin/bash -ex + +cd $(dirname "${BASH_SOURCE[0]}")/.. + +function usage { + set +x + echo "Usage: $0 [-r DB_BACKUP ] [ -c CONTAINER_NAME ] [ -T ]" + echo "" + echo " -r db backup to restore (relative to dojo/data/backups)" + echo " -c the name of the dojo container (default: dojo-test)" + echo " -D use a blank data volume (builds everything from scratch)" + echo " -T don't run tests" + exit +} + +VOLUME_ARGS=("-v" "$PWD:/opt/pwn.college:shared") +ENV_ARGS=( ) +DB_RESTORE="" +CONTAINER_NAME=dojo-test +TEST=yes +while getopts "r:c:he:TD" OPT +do + case $OPT in + r) DB_RESTORE="$OPTARG" ;; + c) CONTAINER_NAME="$OPTARG" ;; + T) TEST=no ;; + D) + DATA_DIR=$(mktemp -d) + VOLUME_ARGS+=("-v" "$DATA_DIR:/opt/pwn.college/data:shared") + ;; + e) ENV_ARGS+=("-e" "$OPTARG") ;; + h) usage ;; + ?) + OPTIND=$(($OPTIND-1)) + break + ;; + esac +done +shift $((OPTIND-1)) + +[ "${#VOLUME_ARGS[@]}" -eq 2 ] && VOLUME_ARGS+=( + "-v" "/opt/pwn.college/data/dojos" + "-v" "/opt/pwn.college/data/mysql" +) + +export CONTAINER_NAME +docker kill "$CONTAINER_NAME" 2>/dev/null || echo "No $CONTAINER_NAME container to kill." +docker rm "$CONTAINER_NAME" 2>/dev/null || echo "No $CONTAINER_NAME container to remove." 
+while docker ps -a | grep "$CONTAINER_NAME"; do sleep 1; done + +# freaking bad unmount +sleep 1 +mount | grep $PWD | while read -a ENTRY +do + sudo umount "${ENTRY[2]}" +done + +docker run --rm --privileged -d "${VOLUME_ARGS[@]}" "${ENV_ARGS[@]}" -p 2222:22 -p 80:80 -p 443:443 --name "$CONTAINER_NAME" dojo || exit 1 + +# fix the insane routing thing +read -a GW <<<$(ip route show default) +read -a NS <<<$(docker exec "$CONTAINER_NAME" cat /etc/resolv.conf | grep nameserver) +docker exec "$CONTAINER_NAME" ip route add "${GW[2]}" via 172.17.0.1 +docker exec "$CONTAINER_NAME" ip route add "${NS[1]}" via 172.17.0.1 || echo "Failed to add nameserver route" + +docker exec "$CONTAINER_NAME" dojo wait +[ -n "$DB_RESTORE" ] && until docker exec "$CONTAINER_NAME" dojo restore $DB_RESTORE; do sleep 1; done + +until curl -s localhost.pwn.college | grep -q pwn; do sleep 1; done +[ "$TEST" == "yes" ] && MOZ_HEADLESS=1 pytest -v test/test_running.py test/test_welcome.py From 1168b99805c74b2c4a4b95e70d179e6db1fc73c5 Mon Sep 17 00:00:00 2001 From: Yan Date: Sat, 6 Apr 2024 00:31:45 -0700 Subject: [PATCH 42/70] allow hacker lookup by name --- dojo_plugin/pages/users.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/dojo_plugin/pages/users.py b/dojo_plugin/pages/users.py index 9ba1f9368..27d6167ea 100644 --- a/dojo_plugin/pages/users.py +++ b/dojo_plugin/pages/users.py @@ -39,6 +39,13 @@ def view_other(user_id): abort(404) return view_hacker(user) +@users.route("/hacker/") +def view_other_name(user_name): + user = Users.query.filter_by(name=user_name).first() + if user is None or user.hidden: + abort(404) + return view_hacker(user) + @users.route("/hacker/") @authed_only def view_self(): From cf0a1eb4625a2c429bae5c9a790bbe3832dbc72c Mon Sep 17 00:00:00 2001 From: Yan Date: Sat, 6 Apr 2024 15:12:50 -0700 Subject: [PATCH 43/70] update greetz --- index.html | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/index.html b/index.html index 69e8d4b39..57bf1675e 100644 --- a/index.html +++ b/index.html @@ -151,9 +151,10 @@

Getting Started

- redgate, for contributing to embryoasm. - [Pascal-0x90](https://twitter.com/pascal_0x90), for contributing to embryoasm. - [frqmod](https://twitter.com/frqmod), for contributing embryogdb. -- [kylebot](https://www.kylebot.net), for contributing babyfmt. +- [kylebot](https://www.kylebot.net), for contributing babyfmt, Kernel Exploitation, and the inaugural Quarterly Quiz! - ramen, for contributing babyfile. - zolutal, for contributing babyarch. +- spencerpogo, for contributing Windows support.

From 128228e061cf73d67ecf598737356510e4395d7b Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Sat, 6 Apr 2024 16:10:54 -0700 Subject: [PATCH 44/70] add pt-dump gdb plugin to challenge image --- challenge/Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index 81d382b83..fd9278d28 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -137,6 +137,8 @@ RUN < Date: Sat, 6 Apr 2024 00:51:28 -0700 Subject: [PATCH 45/70] also show emoji not attached to a dojo --- dojo_plugin/utils/awards.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo_plugin/utils/awards.py b/dojo_plugin/utils/awards.py index 31ba355b8..967e398e9 100644 --- a/dojo_plugin/utils/awards.py +++ b/dojo_plugin/utils/awards.py @@ -72,7 +72,7 @@ def get_viewable_emojis(user): emojis = ( Emojis.query .join(Users) - .filter(~Users.hidden, Emojis.category.in_((*viewable_dojo_urls.keys(), None))) + .filter(~Users.hidden, db.or_(Emojis.category.in_((*viewable_dojo_urls.keys(), None)), Emojis.category == None)) .order_by(Emojis.date) .with_entities( Emojis.name, From 9690d49925136b5d428840ff2b56579faa9c2796 Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 20:46:15 -0700 Subject: [PATCH 46/70] increasing timeout --- not a good sign... --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2c7585381..632f37bd0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ on: jobs: smoketest: runs-on: ubuntu-22.04 - timeout-minutes: 10 + timeout-minutes: 15 steps: - uses: actions/checkout@v3 - uses: docker/setup-buildx-action@v3 From 41d860c7652ac3db6834f48e8375a1bd5a3dfce4 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Mon, 8 Apr 2024 10:20:48 -0700 Subject: [PATCH 47/70] Awards: Refactor null category --- dojo_plugin/utils/awards.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dojo_plugin/utils/awards.py b/dojo_plugin/utils/awards.py index 967e398e9..2a8030209 100644 --- a/dojo_plugin/utils/awards.py +++ b/dojo_plugin/utils/awards.py @@ -72,7 +72,7 @@ def get_viewable_emojis(user): emojis = ( Emojis.query .join(Users) - .filter(~Users.hidden, db.or_(Emojis.category.in_((*viewable_dojo_urls.keys(), None)), Emojis.category == None)) + .filter(~Users.hidden, db.or_(Emojis.category.in_(*viewable_dojo_urls.keys()), Emojis.category == None)) .order_by(Emojis.date) .with_entities( Emojis.name, From fc29efd175d0eb9f34c3f87aae1c3cdd0329ff33 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Mon, 8 Apr 2024 10:50:38 -0700 Subject: [PATCH 48/70] Awards: Refactor null date --- dojo_plugin/utils/awards.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dojo_plugin/utils/awards.py b/dojo_plugin/utils/awards.py index 2a8030209..0d0365ddc 100644 --- a/dojo_plugin/utils/awards.py +++ b/dojo_plugin/utils/awards.py @@ -39,7 +39,7 @@ def get_belts(): belts = ( Belts.query .join(Users) - .filter(Belts.name.in_(BELT_ORDER), ~Users.hidden, Belts.date.isnot(None)) # TODO: Date should never be null + .filter(Belts.name.in_(BELT_ORDER), ~Users.hidden) .with_entities( Belts.date, Belts.name.label("color"), @@ -72,7 +72,7 @@ def get_viewable_emojis(user): emojis = ( Emojis.query .join(Users) - .filter(~Users.hidden, db.or_(Emojis.category.in_(*viewable_dojo_urls.keys()), Emojis.category == None)) + .filter(~Users.hidden, db.or_(Emojis.category.in_(viewable_dojo_urls.keys()), Emojis.category == 
None)) .order_by(Emojis.date) .with_entities( Emojis.name, From 4f359655800446cfda945bd96120169784ba7ea8 Mon Sep 17 00:00:00 2001 From: Jude Date: Mon, 8 Apr 2024 12:01:21 -0600 Subject: [PATCH 49/70] Windows VM improvments (#384) * remove spaces * add change vnc and ssh services' type to Manual in setup.ps1 * remove not working set service code in post_install.ps1 * add -force back to Stop-Computer cmd at end of post_install.ps1 * logoff by session name, console, instead session id --- challenge/windows/post_install.ps1 | 4 +--- challenge/windows/setup.ps1 | 8 ++++++-- challenge/windows/startup.ps1 | 4 ++-- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/challenge/windows/post_install.ps1 b/challenge/windows/post_install.ps1 index f3de9e274..daa927035 100644 --- a/challenge/windows/post_install.ps1 +++ b/challenge/windows/post_install.ps1 @@ -25,6 +25,4 @@ cl challenge-proxy.c # -- shutdown -- -Set-Service -Name sshd -StartupType Manual -Set-Service -Name tvnserver -StartupType Manual -Stop-Computer -computername localhost +Stop-Computer -computername localhost -force diff --git a/challenge/windows/setup.ps1 b/challenge/windows/setup.ps1 index 40dc90e49..903a88c8a 100644 --- a/challenge/windows/setup.ps1 +++ b/challenge/windows/setup.ps1 @@ -109,7 +109,7 @@ function EnableWmiRemoting($namespace) { throw "GetSecurityDescriptor failed: $($output.ReturnValue)" } $acl = $output.Descriptor - + $computerName = (Get-WmiObject Win32_ComputerSystem).Name $acc = Get-WmiObject -Class Win32_Group -Filter "Domain='$computerName' and Name='Users'" @@ -157,7 +157,7 @@ EnableWmiRemoting "Root/StandardCimv2" (Get-Content -Path C:\Windows\Temp\policy-edit.inf) ` -replace "PasswordComplexity = 1", "PasswordComplexity = 0" ` -replace "SeShutdownPrivilege .+", "`$0,hacker" ` - -replace "SeRemoteShutdownPrivilege .+", "`$0,hacker" | + -replace "SeRemoteShutdownPrivilege .+", "`$0,hacker" | Set-Content -Path C:\Windows\Temp\policy-edit.inf & secedit /configure /db C:\windows\security\local.sdb /cfg C:\Windows\Temp\policy-edit.inf Remove-Item -Force C:\Windows\Temp\policy-edit.inf @@ -253,5 +253,9 @@ Add-Content -Path $env:windir\System32\drivers\etc\hosts -Value "`n$ip`tpublic-l Copy-Item A:\config_startup.ps1 -Destination "C:\Program Files\Common Files\startup.ps1" & schtasks /create /tn "dojoinit" /sc onstart /delay 0000:00 /rl highest /ru system /tr "powershell.exe -file 'C:\Program Files\Common Files\startup.ps1'" /f +# config services' StartupType to start when Start-Service is called or manually started (Manual) instead of start with Windows (Automatic) +Set-Service -Name sshd -StartupType Manual +Set-Service -Name tvnserver -StartupType Manual + # -- shutdown -- Stop-Computer -computername localhost -force diff --git a/challenge/windows/startup.ps1 b/challenge/windows/startup.ps1 index d5afbe5af..1c75600c8 100644 --- a/challenge/windows/startup.ps1 +++ b/challenge/windows/startup.ps1 @@ -11,7 +11,7 @@ echo 'pwn.college{uninitialized}' > C:\flag # crash course in the footguns of NTFS's ACL based permissions system that I learned # the hard way: # - a "Deny" rule will always take precedence over an "Allow" rule. -# For example: Admins Allow Read + Users Deny Read +# For example: Admins Allow Read + Users Deny Read # This will result in no one being able to read the flag because they all fall under # the "Users" rule. # - ACLs inherit from the parent directory by default unless explicitly disabled. 
@@ -36,7 +36,7 @@ if (Test-Path X:\practice-mode-enabled) { Add-LocalGroupMember -Group "Administrators" -Member hacker } -logoff 1 +logoff console Start-Service sshd Start-Service tvnserver From b1e3a405c4848f6cebf8cbee22c68d9adac7a695 Mon Sep 17 00:00:00 2001 From: Robert Wasinger Date: Wed, 10 Apr 2024 18:55:30 -0700 Subject: [PATCH 50/70] landing page - fix typo --- index.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.html b/index.html index 57bf1675e..7c0a19f3c 100644 --- a/index.html +++ b/index.html @@ -91,7 +91,7 @@

Getting Started

- [Software Exploitation](https://pwn.college/software-exploitation/) ### How to get the actual belt? -To get your belt, [send us an email](malto:pwn@pwn.college) from the email address associated with your pwn.college account once you’ve completed the necessary challenges. +To get your belt, [send us an email](mailto:pwn@pwn.college) from the email address associated with your pwn.college account once you’ve completed the necessary challenges. We’ll then get your belt over to you (eventually)! Note that, due to logistical challenges, we're currently only _shipping_ belts to hackers after they earn their blue belt. Until then, we will belt you in person, at ASU or some security conference. From eba8abfa4259792a1d7815b27c361e78ffb1f72f Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 15:59:54 -0700 Subject: [PATCH 51/70] export exec_run to a util module --- dojo_plugin/api/v1/docker.py | 29 +++++++++++++---------------- dojo_plugin/utils/workspace.py | 16 ++++++++++++++++ 2 files changed, 29 insertions(+), 16 deletions(-) create mode 100644 dojo_plugin/utils/workspace.py diff --git a/dojo_plugin/api/v1/docker.py b/dojo_plugin/api/v1/docker.py index a6af3c273..f1365a036 100644 --- a/dojo_plugin/api/v1/docker.py +++ b/dojo_plugin/api/v1/docker.py @@ -15,6 +15,7 @@ from ...models import Dojos, DojoModules, DojoChallenges from ...utils import serialize_user_flag, simple_tar, random_home_path, module_challenges_visible, user_ipv4 from ...utils.dojo import dojo_accessible, get_current_dojo_challenge +from ...utils.workspace import exec_run docker_namespace = Namespace( @@ -23,16 +24,6 @@ def start_challenge(user, dojo_challenge, practice): - def exec_run(cmd, *, shell=False, assert_success=True, user="root", **kwargs): - if shell: - cmd = f"""/bin/sh -c \" - {cmd} - \"""" - exit_code, output = container.exec_run(cmd, user=user, **kwargs) - if assert_success: - assert exit_code in (0, None), output - return exit_code, output - def setup_home(user): homes = pathlib.Path("/var/homes") homefs = homes / "homefs" @@ -138,8 +129,11 @@ def start_container(user, dojo_challenge, practice): return container def verify_nosuid_home(): - exit_code, output = exec_run("findmnt --output OPTIONS /home/hacker", - assert_success=False) + exit_code, output = exec_run( + "findmnt --output OPTIONS /home/hacker", + container=container, + assert_success=False + ) if exit_code != 0: container.kill() container.wait(condition="removed") @@ -157,6 +151,7 @@ def grant_sudo(): echo 'hacker ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers passwd -d root """, + container=container, shell=True ) @@ -164,14 +159,14 @@ def insert_challenge(user, dojo_challenge): for path in dojo_challenge.challenge_paths(user): with simple_tar(path, f"/challenge/{path.name}") as tar: container.put_archive("/", tar) - exec_run("chown -R root:root /challenge") - exec_run("chmod -R 4755 /challenge") + exec_run("chown -R root:root /challenge", container=container) + exec_run("chmod -R 4755 /challenge", container=container) def insert_flag(flag): - exec_run(f"echo 'pwn.college{{{flag}}}' > /flag", shell=True) + exec_run(f"echo 'pwn.college{{{flag}}}' > /flag", container=container, shell=True) def insert_auth_token(auth_token): - exec_run(f"echo '{auth_token}' > /.authtoken", shell=True) + exec_run(f"echo '{auth_token}' > /.authtoken", container=container, shell=True) def initialize_container(): exec_run( @@ -180,6 +175,7 @@ def initialize_container(): /opt/pwn.college/docker-initialize.sh touch /opt/pwn.college/.initialized """, + container=container, 
shell=True ) exec_run( @@ -187,6 +183,7 @@ def initialize_container(): /opt/pwn.college/docker-entrypoint.sh & """, shell=True, + container=container, user="hacker" ) diff --git a/dojo_plugin/utils/workspace.py b/dojo_plugin/utils/workspace.py new file mode 100644 index 000000000..5aa289d23 --- /dev/null +++ b/dojo_plugin/utils/workspace.py @@ -0,0 +1,16 @@ +import docker +docker_client = docker.from_env() + +def exec_run(cmd, *, shell=False, assert_success=True, user="root", pwncollege_uid=None, container=None, **kwargs): + + if shell: + cmd = f"""/bin/sh -c \" + {cmd} + \"""" + + if not container: + container = docker_client.containers.get(f"user_{pwncollege_uid}") + exit_code, output = container.exec_run(cmd, user=user, **kwargs) + if assert_success: + assert exit_code in (0, None), output + return exit_code, output From 98fb1f5b2ce8e608b4c0a266806cef33cf9cad85 Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 20:32:04 -0700 Subject: [PATCH 52/70] switch to xfce by default --- challenge/Dockerfile | 5 ++--- script/container-setup.sh | 1 + 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index fd9278d28..2dd509ac1 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -753,7 +753,8 @@ COPY --link --from=builder-github-tools / / ################################################################################ FROM challenge-micro as challenge-mini -COPY --link --from=builder-desktop-base / / +COPY --link --from=builder-desktop-base-yes / / +COPY --link --from=builder-desktop-xfce-yes / / ################################################################################ @@ -773,7 +774,6 @@ COPY --link --from=builder-geckodriver / / COPY --link --from=builder-burpsuite / / COPY --link --from=builder-busybox / / COPY --link --from=builder-glow / / -COPY --link --from=builder-desktop-xfce / / COPY --link --from=builder-virtiofsd /opt/virtiofsd /opt/virtiofsd COPY --link --from=builder-desktop-ida-free / / COPY --link --from=builder-desktop-binja-free / / @@ -807,7 +807,6 @@ COPY --link --from=builder-geckodriver-yes / / COPY --link --from=builder-burpsuite-yes / / COPY --link --from=builder-busybox-yes / / COPY --link --from=builder-glow-yes / / -COPY --link --from=builder-desktop-xfce-yes / / COPY --link --from=builder-virtiofsd-yes /opt/virtiofsd /opt/virtiofsd COPY --link --from=builder-desktop-ida-free / / COPY --link --from=builder-desktop-binja-free / / diff --git a/script/container-setup.sh b/script/container-setup.sh index fa2eca2c3..8e9af087b 100755 --- a/script/container-setup.sh +++ b/script/container-setup.sh @@ -36,6 +36,7 @@ define DISCORD_BOT_TOKEN define DISCORD_GUILD_ID define DEFAULT_INSTALL_SELECTION no # default to not installing tools define INSTALL_DESKTOP_BASE yes # matches the challenge-mini configuration +define INSTALL_XFCE yes # matches the challenge-mini configuration define INSTALL_IDA_FREE no # explicitly disable -- only for free dojos define INSTALL_BINJA_FREE no # explicitly disable -- only for free dojos define INSTALL_WINDOWS no # explicitly disable From 6dd2e510ee285c49675d3d7b75addff56545e795 Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 19:51:08 -0700 Subject: [PATCH 53/70] launch the desktop only when it is requested --- challenge/Dockerfile | 1 + .../39_start_gui.sh => start-desktop.sh} | 2 ++ dojo_plugin/pages/workspace.py | 7 +++++++ 3 files changed, 10 insertions(+) rename challenge/{docker-entrypoint.d/39_start_gui.sh => start-desktop.sh} (97%) diff --git 
a/challenge/Dockerfile b/challenge/Dockerfile index 2dd509ac1..071170b92 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -393,6 +393,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ fluxbox xterm EOF +COPY start-desktop.sh /opt/pwn.college/start-desktop.sh FROM builder-desktop-base-${INSTALL_DESKTOP_BASE} as builder-desktop-base diff --git a/challenge/docker-entrypoint.d/39_start_gui.sh b/challenge/start-desktop.sh similarity index 97% rename from challenge/docker-entrypoint.d/39_start_gui.sh rename to challenge/start-desktop.sh index b2552a169..cd2316dd3 100755 --- a/challenge/docker-entrypoint.d/39_start_gui.sh +++ b/challenge/start-desktop.sh @@ -1,5 +1,7 @@ #!/bin/sh +ps aux | grep -q X[t]igervnc && exit + mkdir -p /tmp/.dojo/vnc /home/hacker/.vnc container_id="$(cat /.authtoken)" diff --git a/dojo_plugin/pages/workspace.py b/dojo_plugin/pages/workspace.py index 0b32007c0..5476ca900 100644 --- a/dojo_plugin/pages/workspace.py +++ b/dojo_plugin/pages/workspace.py @@ -8,6 +8,7 @@ from ..models import Dojos from ..utils import random_home_path, redirect_user_socket, get_current_container from ..utils.dojo import dojo_route, get_current_dojo_challenge +from ..utils.workspace import exec_run workspace = Blueprint("pwncollege_workspace", __name__) @@ -39,6 +40,12 @@ def view_desktop(): if not container: return render_template("iframe.html", active=False) + exec_run( + "/opt/pwn.college/start-desktop.sh 2>&1 > /tmp/.dojo/desktop.log", + user="hacker", pwncollege_uid=user.id, shell=True, + assert_success=True + ) + interact_password = container_password(container, "desktop", "interact") view_password = container_password(container, "desktop", "view") From 249b70440106037e3f9c2e1a7a9ace6d26bad3d6 Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 21:31:07 -0700 Subject: [PATCH 54/70] combine desktop-base and desktop-xfce into desktop --- challenge/Dockerfile | 34 +++++++++------------------------- docker-compose.yml | 3 +-- script/container-setup.sh | 3 +-- 3 files changed, 11 insertions(+), 29 deletions(-) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index 071170b92..16545a985 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -14,8 +14,7 @@ ARG INSTALL_GECKODRIVER=${DEFAULT_INSTALL_SELECTION} ARG INSTALL_BURPSUITE=${DEFAULT_INSTALL_SELECTION} ARG INSTALL_BUSYBOX=${DEFAULT_INSTALL_SELECTION} ARG INSTALL_GLOW=${DEFAULT_INSTALL_SELECTION} -ARG INSTALL_DESKTOP_BASE=${DEFAULT_INSTALL_SELECTION} -ARG INSTALL_XFCE=${DEFAULT_INSTALL_SELECTION} +ARG INSTALL_DESKTOP=${DEFAULT_INSTALL_SELECTION} ARG INSTALL_VIRTIOFSD=${DEFAULT_INSTALL_SELECTION} ARG INSTALL_IDA_FREE=${DEFAULT_INSTALL_SELECTION} ARG INSTALL_BINJA_FREE=${DEFAULT_INSTALL_SELECTION} @@ -377,37 +376,22 @@ FROM builder-virtiofsd-${INSTALL_VIRTIOFSD} as builder-virtiofsd ################################################################################ -FROM essentials as builder-desktop-base-no -FROM essentials as builder-desktop-base-yes +FROM essentials as builder-desktop-no +FROM essentials as builder-desktop-yes ARG UBUNTU_VERSION=20.04 RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ [ "${UBUNTU_VERSION}" == "20.04" ] && TGR=common || TGR=tools; \ - apt-get update && xargs apt-get install --no-install-recommends -yqq < Date: Fri, 5 Apr 2024 23:18:53 -0700 Subject: [PATCH 55/70] delay vscode start until user request --- challenge/Dockerfile | 2 ++ .../20_start_code_server.sh => start-vscode.sh} | 4 ++++ 
dojo_plugin/pages/workspace.py | 13 ++++++++++++- 3 files changed, 18 insertions(+), 1 deletion(-) rename challenge/{docker-entrypoint.d/20_start_code_server.sh => start-vscode.sh} (85%) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index 16545a985..413138c96 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -258,6 +258,8 @@ EOF RUN chmod +x rg +COPY start-vscode.sh /opt/pwn.college/start-vscode.sh + ################################################################################ FROM builder as builder-tcpdump-no diff --git a/challenge/docker-entrypoint.d/20_start_code_server.sh b/challenge/start-vscode.sh similarity index 85% rename from challenge/docker-entrypoint.d/20_start_code_server.sh rename to challenge/start-vscode.sh index e6ee0607d..aea8a2eaf 100755 --- a/challenge/docker-entrypoint.d/20_start_code_server.sh +++ b/challenge/start-vscode.sh @@ -1,5 +1,7 @@ #!/bin/sh +ps aux | grep -q code-serve[r] && exit + mkdir -p /tmp/.dojo/code-server start-stop-daemon --start \ --pidfile /tmp/.dojo/code-server/code-server.pid \ @@ -15,3 +17,5 @@ start-stop-daemon --start \ >/tmp/.dojo/code-server/code-server.log \ 2>&1 + +until curl -s dojo-user:6080 >/dev/null; do sleep 0.1; done diff --git a/dojo_plugin/pages/workspace.py b/dojo_plugin/pages/workspace.py index 5476ca900..c3a239b4a 100644 --- a/dojo_plugin/pages/workspace.py +++ b/dojo_plugin/pages/workspace.py @@ -18,6 +18,10 @@ "desktop": 6081, "desktop-windows": 6082, } +init_scripts = { + "vscode": "/opt/pwn.college/start-vscode.sh", + "desktop": "/opt/pwn.college/start-desktop.sh" +} def container_password(container, *args): @@ -41,7 +45,7 @@ def view_desktop(): return render_template("iframe.html", active=False) exec_run( - "/opt/pwn.college/start-desktop.sh 2>&1 > /tmp/.dojo/desktop.log", + f"{init_scripts['desktop']} 2>&1 > /tmp/.dojo/service-desktop.log", user="hacker", pwncollege_uid=user.id, shell=True, assert_success=True ) @@ -107,6 +111,13 @@ def forward_workspace(service, service_path=""): except ValueError: abort(404) + if service in init_scripts: + exec_run( + f"{init_scripts[service]} 2>&1 > /tmp/.dojo/service-{service}.log", + user="hacker", pwncollege_uid=user.id, shell=True, + assert_success=True + ) + elif service.count("~") == 1: port, user_id = service.split("~", 1) try: From f5736bbe4b22a7eed4a83c9cf5a826b4cbe04809 Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 23:35:27 -0700 Subject: [PATCH 56/70] move service start scripts to /opt/pwn.college/services.d copy the init scripts in builder-pwn.college --- challenge/Dockerfile | 4 +--- challenge/{start-desktop.sh => services.d/desktop} | 0 .../desktop-win} | 4 ++++ challenge/{start-vscode.sh => services.d/vscode} | 0 dojo_plugin/pages/workspace.py | 12 ++++-------- 5 files changed, 9 insertions(+), 11 deletions(-) rename challenge/{start-desktop.sh => services.d/desktop} (100%) rename challenge/{docker-entrypoint.d/38_start_windows_gui.sh => services.d/desktop-win} (84%) rename challenge/{start-vscode.sh => services.d/vscode} (100%) diff --git a/challenge/Dockerfile b/challenge/Dockerfile index 413138c96..7d65cdc08 100644 --- a/challenge/Dockerfile +++ b/challenge/Dockerfile @@ -258,8 +258,6 @@ EOF RUN chmod +x rg -COPY start-vscode.sh /opt/pwn.college/start-vscode.sh - ################################################################################ FROM builder as builder-tcpdump-no @@ -404,7 +402,6 @@ EOF # TODO: can we generate desktop data with cli ??? 
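# Illustrative note, not part of the patch: with PATCH 54/70 the desktop stack is
# gated by a single INSTALL_DESKTOP build argument (replacing INSTALL_DESKTOP_BASE
# and INSTALL_XFCE), so a desktop-enabled image would presumably be built with
# something like
#     docker build --build-arg INSTALL_DESKTOP=yes .
# (build context and remaining flags are assumptions). This patch then gathers the
# on-demand start scripts under /opt/pwn.college/services.d/ (vscode, desktop, and
# the Windows desktop, renamed to desktop-windows in PATCH 57/70), which the plugin
# launches via exec_run() when the matching workspace route is requested, instead of
# running them from docker-entrypoint.d at container start.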
COPY desktop/xfce4 /usr/share/desktop-base/profiles/xdg-config/xfce4 COPY desktop/pwncollege_background.jpg /usr/share/backgrounds/ -COPY start-desktop.sh /opt/pwn.college/start-desktop.sh RUN rm /etc/xdg/autostart/* @@ -572,6 +569,7 @@ RUN mkdir /opt/pwn.college COPY docker-initialize.sh /opt/pwn.college/docker-initialize.sh COPY docker-entrypoint.d /opt/pwn.college/docker-entrypoint.d COPY docker-entrypoint.sh /opt/pwn.college/docker-entrypoint.sh +COPY services.d /opt/pwn.college/services.d COPY setuid_interpreter.c /opt/pwn.college/setuid_interpreter.c COPY bash.bashrc /opt/pwn.college/bash.bashrc COPY vm /opt/pwn.college/vm diff --git a/challenge/start-desktop.sh b/challenge/services.d/desktop similarity index 100% rename from challenge/start-desktop.sh rename to challenge/services.d/desktop diff --git a/challenge/docker-entrypoint.d/38_start_windows_gui.sh b/challenge/services.d/desktop-win similarity index 84% rename from challenge/docker-entrypoint.d/38_start_windows_gui.sh rename to challenge/services.d/desktop-win index 4b7f2ecf5..5f424b73a 100755 --- a/challenge/docker-entrypoint.d/38_start_windows_gui.sh +++ b/challenge/services.d/desktop-win @@ -1,5 +1,7 @@ #!/bin/sh +ps aux | grep -q [d]ojo-user:6082 && exit + mkdir -p /tmp/.dojo/vnc /home/hacker/.vnc start-stop-daemon --start \ --pidfile /tmp/.dojo/vnc/websockify-windows.pid \ @@ -14,3 +16,5 @@ start-stop-daemon --start \ >/tmp/.dojo/vnc/websockify-windows.log \ 2>&1 + +until curl -s dojo-user:6080 >/dev/null; do sleep 0.1; done diff --git a/challenge/start-vscode.sh b/challenge/services.d/vscode similarity index 100% rename from challenge/start-vscode.sh rename to challenge/services.d/vscode diff --git a/dojo_plugin/pages/workspace.py b/dojo_plugin/pages/workspace.py index c3a239b4a..4937ae4d0 100644 --- a/dojo_plugin/pages/workspace.py +++ b/dojo_plugin/pages/workspace.py @@ -18,11 +18,7 @@ "desktop": 6081, "desktop-windows": 6082, } -init_scripts = { - "vscode": "/opt/pwn.college/start-vscode.sh", - "desktop": "/opt/pwn.college/start-desktop.sh" -} - +ondemand_services = { "vscode", "desktop", "desktop-windows" } def container_password(container, *args): key = container.labels["dojo.auth_token"].encode() @@ -45,7 +41,7 @@ def view_desktop(): return render_template("iframe.html", active=False) exec_run( - f"{init_scripts['desktop']} 2>&1 > /tmp/.dojo/service-desktop.log", + "/opt/pwn.college/services.d/desktop 2>&1 > /tmp/.dojo/service-desktop.log", user="hacker", pwncollege_uid=user.id, shell=True, assert_success=True ) @@ -111,9 +107,9 @@ def forward_workspace(service, service_path=""): except ValueError: abort(404) - if service in init_scripts: + if service in ondemand_services: exec_run( - f"{init_scripts[service]} 2>&1 > /tmp/.dojo/service-{service}.log", + f"/opt/pwn.college/services.d/{service} 2>&1 > /tmp/.dojo/service-{service}.log", user="hacker", pwncollege_uid=user.id, shell=True, assert_success=True ) From d68bab256b5198280c47513aa4f0b7b056bc8c66 Mon Sep 17 00:00:00 2001 From: Yan Date: Fri, 5 Apr 2024 23:46:25 -0700 Subject: [PATCH 57/70] blindly adding support for windows auto-start rename fix windows port in init script --- challenge/services.d/{desktop-win => desktop-windows} | 2 +- dojo_plugin/pages/desktop.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) rename challenge/services.d/{desktop-win => desktop-windows} (90%) diff --git a/challenge/services.d/desktop-win b/challenge/services.d/desktop-windows similarity index 90% rename from challenge/services.d/desktop-win rename to 
challenge/services.d/desktop-windows index 5f424b73a..f14cd0520 100755 --- a/challenge/services.d/desktop-win +++ b/challenge/services.d/desktop-windows @@ -17,4 +17,4 @@ start-stop-daemon --start \ >>/tmp/.dojo/vnc/websockify-windows.log \ 2>&1 -until curl -s dojo-user:6080 >/dev/null; do sleep 0.1; done +until curl -s dojo-user:6082 >/dev/null; do sleep 0.1; done diff --git a/dojo_plugin/pages/desktop.py b/dojo_plugin/pages/desktop.py index 4298cbf46..a880c776e 100644 --- a/dojo_plugin/pages/desktop.py +++ b/dojo_plugin/pages/desktop.py @@ -7,6 +7,7 @@ from ..utils import random_home_path, get_active_users, redirect_user_socket from ..utils.dojo import dojo_route, get_current_dojo_challenge +from ..utils.workspace import exec_run desktop = Blueprint("pwncollege_desktop", __name__) @@ -61,6 +62,11 @@ def view_desktop(user_id=None): @desktop.route("/desktop-win/") @authed_only def view_desktop_win(user_id=None): + exec_run( + "/opt/pwn.college/services.d/desktop-windows 2>&1 > /tmp/.dojo/service-desktop-windows.log", + user="hacker", pwncollege_uid=user_id or get_current_user().id, shell=True, + assert_success=True + ) return view_desktop_res("desktop-win", user_id, "abcd") From c19e43e354dc164350648aeb216047fb316f3cfc Mon Sep 17 00:00:00 2001 From: Yan Date: Mon, 8 Apr 2024 17:10:32 -0700 Subject: [PATCH 58/70] better log redirection and singleton checking --- challenge/services.d/desktop | 3 ++- challenge/services.d/desktop-windows | 3 ++- challenge/services.d/vscode | 3 ++- dojo_plugin/pages/desktop.py | 2 +- dojo_plugin/pages/workspace.py | 4 ++-- 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/challenge/services.d/desktop b/challenge/services.d/desktop index cd2316dd3..b7754d99f 100755 --- a/challenge/services.d/desktop +++ b/challenge/services.d/desktop @@ -1,6 +1,7 @@ #!/bin/sh -ps aux | grep -q X[t]igervnc && exit +[ -f /tmp/.dojo/vnc/vncserver.pid ] && exit +exec 2> /tmp/.dojo/service-desktop.log mkdir -p /tmp/.dojo/vnc /home/hacker/.vnc diff --git a/challenge/services.d/desktop-windows b/challenge/services.d/desktop-windows index f14cd0520..f90a60e15 100755 --- a/challenge/services.d/desktop-windows +++ b/challenge/services.d/desktop-windows @@ -1,6 +1,7 @@ #!/bin/sh -ps aux | grep -q [d]ojo-user:6082 && exit +[ -f /tmp/.dojo/vnc/websockify-windows.pid ] && exit +exec 2> /tmp/.dojo/service-desktop-windows.log mkdir -p /tmp/.dojo/vnc /home/hacker/.vnc start-stop-daemon --start \ diff --git a/challenge/services.d/vscode b/challenge/services.d/vscode index aea8a2eaf..22a807ad7 100755 --- a/challenge/services.d/vscode +++ b/challenge/services.d/vscode @@ -1,6 +1,7 @@ #!/bin/sh -ps aux | grep -q code-serve[r] && exit +[ -f /tmp/.dojo/code-server/code-server.pid ] && exit +exec 2> /tmp/.dojo/service-vscode.log mkdir -p /tmp/.dojo/code-server start-stop-daemon --start \ diff --git a/dojo_plugin/pages/desktop.py b/dojo_plugin/pages/desktop.py index a880c776e..886283b38 100644 --- a/dojo_plugin/pages/desktop.py +++ b/dojo_plugin/pages/desktop.py @@ -63,7 +63,7 @@ def view_desktop(user_id=None): @authed_only def view_desktop_win(user_id=None): exec_run( - "/opt/pwn.college/services.d/desktop-windows 2>&1 > /tmp/.dojo/service-desktop-windows.log", + "/opt/pwn.college/services.d/desktop-windows", user="hacker", pwncollege_uid=user_id or get_current_user().id, shell=True, assert_success=True ) diff --git a/dojo_plugin/pages/workspace.py b/dojo_plugin/pages/workspace.py index 4937ae4d0..3f8dc5f91 100644 --- a/dojo_plugin/pages/workspace.py +++ b/dojo_plugin/pages/workspace.py @@ 
-41,7 +41,7 @@ def view_desktop(): return render_template("iframe.html", active=False) exec_run( - "/opt/pwn.college/services.d/desktop 2>&1 > /tmp/.dojo/service-desktop.log", + "/opt/pwn.college/services.d/desktop", user="hacker", pwncollege_uid=user.id, shell=True, assert_success=True ) @@ -109,7 +109,7 @@ def forward_workspace(service, service_path=""): if service in ondemand_services: exec_run( - f"/opt/pwn.college/services.d/{service} 2>&1 > /tmp/.dojo/service-{service}.log", + f"/opt/pwn.college/services.d/{service}", user="hacker", pwncollege_uid=user.id, shell=True, assert_success=True ) From 9fc12f75a0fa1f44c7e06fd4805107a7666d4d8f Mon Sep 17 00:00:00 2001 From: Yan Date: Mon, 8 Apr 2024 17:12:54 -0700 Subject: [PATCH 59/70] better checking for the desktop coming up --- challenge/services.d/desktop | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/challenge/services.d/desktop b/challenge/services.d/desktop index b7754d99f..592b2c659 100755 --- a/challenge/services.d/desktop +++ b/challenge/services.d/desktop @@ -42,7 +42,8 @@ start-stop-daemon --start \ >>/tmp/.dojo/vnc/websockify.log \ 2>&1 -seq 1 50 | while read cnt; do sleep 0.1; [ -e /tmp/.X11-unix/X42 ] && break; done +until [ -e /tmp/.X11-unix/X42 ]; do sleep 0.1; done +until curl -s dojo-user:6081 >/dev/null; do sleep 0.1; done export DISPLAY=:42 From 6f43aeaeaecb233aefb7630a56e19c311f6294f8 Mon Sep 17 00:00:00 2001 From: Yan Date: Mon, 8 Apr 2024 17:33:17 -0700 Subject: [PATCH 60/70] change function signature of exec_run --- dojo_plugin/api/v1/docker.py | 2 +- dojo_plugin/pages/desktop.py | 2 +- dojo_plugin/pages/workspace.py | 4 ++-- dojo_plugin/utils/workspace.py | 6 +++--- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/dojo_plugin/api/v1/docker.py b/dojo_plugin/api/v1/docker.py index f1365a036..d646f150b 100644 --- a/dojo_plugin/api/v1/docker.py +++ b/dojo_plugin/api/v1/docker.py @@ -184,7 +184,7 @@ def initialize_container(): """, shell=True, container=container, - user="hacker" + workspace_user="hacker" ) setup_home(user) diff --git a/dojo_plugin/pages/desktop.py b/dojo_plugin/pages/desktop.py index 886283b38..89e0da5d8 100644 --- a/dojo_plugin/pages/desktop.py +++ b/dojo_plugin/pages/desktop.py @@ -64,7 +64,7 @@ def view_desktop(user_id=None): def view_desktop_win(user_id=None): exec_run( "/opt/pwn.college/services.d/desktop-windows", - user="hacker", pwncollege_uid=user_id or get_current_user().id, shell=True, + workspace_user="hacker", user_id=user_id or get_current_user().id, shell=True, assert_success=True ) return view_desktop_res("desktop-win", user_id, "abcd") diff --git a/dojo_plugin/pages/workspace.py b/dojo_plugin/pages/workspace.py index 3f8dc5f91..ec8c56a86 100644 --- a/dojo_plugin/pages/workspace.py +++ b/dojo_plugin/pages/workspace.py @@ -42,7 +42,7 @@ def view_desktop(): exec_run( "/opt/pwn.college/services.d/desktop", - user="hacker", pwncollege_uid=user.id, shell=True, + workspace_user="hacker", user_id=user.id, shell=True, assert_success=True ) @@ -110,7 +110,7 @@ def forward_workspace(service, service_path=""): if service in ondemand_services: exec_run( f"/opt/pwn.college/services.d/{service}", - user="hacker", pwncollege_uid=user.id, shell=True, + workspace_user="hacker", user_id=user.id, shell=True, assert_success=True ) diff --git a/dojo_plugin/utils/workspace.py b/dojo_plugin/utils/workspace.py index 5aa289d23..ab2feabd5 100644 --- a/dojo_plugin/utils/workspace.py +++ b/dojo_plugin/utils/workspace.py @@ -1,7 +1,7 @@ import docker docker_client = 
docker.from_env() -def exec_run(cmd, *, shell=False, assert_success=True, user="root", pwncollege_uid=None, container=None, **kwargs): +def exec_run(cmd, *, shell=False, assert_success=True, workspace_user="root", user_id=None, container=None, **kwargs): if shell: cmd = f"""/bin/sh -c \" @@ -9,8 +9,8 @@ def exec_run(cmd, *, shell=False, assert_success=True, user="root", pwncollege_u \"""" if not container: - container = docker_client.containers.get(f"user_{pwncollege_uid}") - exit_code, output = container.exec_run(cmd, user=user, **kwargs) + container = docker_client.containers.get(f"user_{user_id}") + exit_code, output = container.exec_run(cmd, user=workspace_user, **kwargs) if assert_success: assert exit_code in (0, None), output return exit_code, output From 20a30686cfe0e0815b697aac165e832cfcc00ba5 Mon Sep 17 00:00:00 2001 From: Yan Date: Thu, 11 Apr 2024 14:24:23 -0700 Subject: [PATCH 61/70] double testcase reliability --- test/test_welcome.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/test_welcome.py b/test/test_welcome.py index 9e0b2e9b5..2a35c12c9 100644 --- a/test/test_welcome.py +++ b/test/test_welcome.py @@ -35,9 +35,9 @@ def vscode_terminal(wd): wd.switch_to.new_window("tab") wd.get(f"{PROTO}://{HOST}/workspace/vscode/") - time.sleep(1) + time.sleep(2) wd.switch_to.active_element.send_keys(Keys.CONTROL + Keys.SHIFT + "`") - time.sleep(1) + time.sleep(2) yield wd.switch_to.active_element @@ -50,11 +50,11 @@ def desktop_terminal(wd, user_id): wd.switch_to.new_window("tab") wd.get(f"{PROTO}://{HOST}/workspace/desktop") - time.sleep(1) + time.sleep(2) workspace_run("DISPLAY=:42.0 xterm &", user=user_id) wd.switch_to.frame("workspace") e = wd.find_element("id", "noVNC_keyboardinput") - time.sleep(1) + time.sleep(2) yield e From 3e8c070842e5233e73df40d17e672a7d1d60e5e5 Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Tue, 16 Apr 2024 21:34:20 -0700 Subject: [PATCH 62/70] CSS: Fix scoreboard on Safari --- dojo_theme/static/css/custom.css | 4 ++++ dojo_theme/static/js/dojo/scoreboard.js | 12 ++++++------ 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/dojo_theme/static/css/custom.css b/dojo_theme/static/css/custom.css index 61b68976c..31f75a5ab 100644 --- a/dojo_theme/static/css/custom.css +++ b/dojo_theme/static/css/custom.css @@ -271,6 +271,10 @@ h4 { font-weight: bold; } +.scoreboard tr { + display: flex; +} + .discord-avatar { display: block; margin: auto; diff --git a/dojo_theme/static/js/dojo/scoreboard.js b/dojo_theme/static/js/dojo/scoreboard.js index 375839b43..717a49f7a 100644 --- a/dojo_theme/static/js/dojo/scoreboard.js +++ b/dojo_theme/static/js/dojo/scoreboard.js @@ -43,20 +43,20 @@ function loadScoreboard(duration, page) { standings.forEach(user => { const row = $(` - #${user.rank} - + #${user.rank} + - + - + - + - ${user.solves} + ${user.solves} `); row.find(".scoreboard-name").text(user.name.slice(0, 50)); From 52f640ddc782eb9370884d7160b30aa53d3905cb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 11:00:49 -0700 Subject: [PATCH 63/70] Bump werkzeug from 2.2.3 to 2.3.8 in /ctfd (#393) Bumps [werkzeug](https://github.com/pallets/werkzeug) from 2.2.3 to 2.3.8. 
- [Release notes](https://github.com/pallets/werkzeug/releases) - [Changelog](https://github.com/pallets/werkzeug/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/werkzeug/compare/2.2.3...2.3.8) --- updated-dependencies: - dependency-name: werkzeug dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ctfd/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ctfd/requirements.txt b/ctfd/requirements.txt index 137f32998..03a11c4b8 100644 --- a/ctfd/requirements.txt +++ b/ctfd/requirements.txt @@ -8,7 +8,7 @@ flask-shell-ipython==0.5.1 # CTFd Flask==2.2.5 -Werkzeug==2.2.3 +Werkzeug==2.3.8 Flask-SQLAlchemy==2.5.1 Flask-Caching==2.0.2 Flask-Migrate==2.5.3 From 80f6eed8a3859466158f05a0a0a337059d682235 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 11:01:28 -0700 Subject: [PATCH 64/70] Bump mio from 0.8.8 to 0.8.11 in /challenge/windows/challenge-proxy (#362) Bumps [mio](https://github.com/tokio-rs/mio) from 0.8.8 to 0.8.11. - [Release notes](https://github.com/tokio-rs/mio/releases) - [Changelog](https://github.com/tokio-rs/mio/blob/master/CHANGELOG.md) - [Commits](https://github.com/tokio-rs/mio/compare/v0.8.8...v0.8.11) --- updated-dependencies: - dependency-name: mio dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- challenge/windows/challenge-proxy/Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/challenge/windows/challenge-proxy/Cargo.lock b/challenge/windows/challenge-proxy/Cargo.lock index 6b03e61a7..000b00d82 100644 --- a/challenge/windows/challenge-proxy/Cargo.lock +++ b/challenge/windows/challenge-proxy/Cargo.lock @@ -231,9 +231,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.147" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "linux-raw-sys" @@ -274,9 +274,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "wasi", From 79e59df45b15c36e120bace48e653d623548bf51 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 18 Apr 2024 11:01:51 -0700 Subject: [PATCH 65/70] Bump gunicorn from 20.1.0 to 22.0.0 in /ctfd (#400) Bumps [gunicorn](https://github.com/benoitc/gunicorn) from 20.1.0 to 22.0.0. - [Release notes](https://github.com/benoitc/gunicorn/releases) - [Commits](https://github.com/benoitc/gunicorn/compare/20.1.0...22.0.0) --- updated-dependencies: - dependency-name: gunicorn dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ctfd/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ctfd/requirements.txt b/ctfd/requirements.txt index 03a11c4b8..ef13b2eda 100644 --- a/ctfd/requirements.txt +++ b/ctfd/requirements.txt @@ -19,7 +19,7 @@ passlib==1.7.4 bcrypt==4.0.1 requests==2.31.0 PyMySQL[rsa]==1.0.2 -gunicorn==20.1.0 +gunicorn==22.0.0 dataset==1.5.2 cmarkgfm==2022.10.27 redis==4.5.5 From 1bad7fdf96cb5a41f7ef845ef78f0c8a05d7fe76 Mon Sep 17 00:00:00 2001 From: Jude Date: Thu, 18 Apr 2024 11:03:13 -0700 Subject: [PATCH 66/70] Add open slides in new window button (#399) * add button to open slides in new window * strip /embed --- dojo_theme/templates/module.html | 1 + 1 file changed, 1 insertion(+) diff --git a/dojo_theme/templates/module.html b/dojo_theme/templates/module.html index d879083ca..11f7d650b 100644 --- a/dojo_theme/templates/module.html +++ b/dojo_theme/templates/module.html @@ -44,6 +44,7 @@
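{# Descriptive note, not part of the patch: per the commit message, the single line
   added in this hunk renders an extra button next to the embedded slides that opens
   them in a new browser window, using the resource URL with "/embed" stripped from
   it so the target loads as a normal, full-page view. #}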

{{ resource.name }}

+ {% endif %} {% elif resource.type == "markdown" %} From a259f41de953c4ec903153f79403ec0d486dac0f Mon Sep 17 00:00:00 2001 From: Connor Nelson Date: Fri, 26 Apr 2024 13:14:23 -0700 Subject: [PATCH 67/70] Dojo: Support static and dynamic pages (#404) * Dojo: Support static and dynamic pages * Fix: dojo_route * Fix: newline * Fix: typo * Work --- dojo_plugin/models/__init__.py | 5 +++-- dojo_plugin/pages/dojo.py | 40 +++++++++++++++++++++++++++++----- dojo_plugin/utils/dojo.py | 4 ++++ dojo_theme/templates/dojo.html | 2 +- 4 files changed, 42 insertions(+), 9 deletions(-) diff --git a/dojo_plugin/models/__init__.py b/dojo_plugin/models/__init__.py index 4c0a1e527..f076a697b 100644 --- a/dojo_plugin/models/__init__.py +++ b/dojo_plugin/models/__init__.py @@ -71,9 +71,10 @@ class Dojos(db.Model): password = db.Column(db.String(128)) data = db.Column(db.JSON) - data_fields = ["type", "award", "comparator", "course", "importable"] + data_fields = ["type", "award", "course", "pages", "importable", "comparator"] data_defaults = { - "importable": True + "pages": [], + "importable": True, } users = db.relationship("DojoUsers", back_populates="dojo") diff --git a/dojo_plugin/pages/dojo.py b/dojo_plugin/pages/dojo.py index cfdda1e89..4733532db 100644 --- a/dojo_plugin/pages/dojo.py +++ b/dojo_plugin/pages/dojo.py @@ -2,16 +2,15 @@ import docker import pytz -from flask import Blueprint, render_template, redirect, abort +from flask import Blueprint, render_template, abort, send_file from CTFd.models import db, Solves, Challenges, Users from CTFd.utils.user import get_current_user -from CTFd.utils.decorators.visibility import check_challenge_visibility from CTFd.utils.helpers import get_infos from CTFd.cache import cache from ..utils import render_markdown, module_visible, module_challenges_visible, is_dojo_admin from ..utils.dojo import dojo_route, get_current_dojo_challenge -from ..models import Dojos, DojoUsers, DojoStudents +from ..models import Dojos, DojoUsers, DojoStudents, DojoModules dojo = Blueprint("pwncollege_dojo", __name__) @@ -46,7 +45,6 @@ def get_stats(dojo): @dojo.route("/") @dojo.route("//") @dojo_route -@check_challenge_visibility def listing(dojo): infos = get_infos() user = get_current_user() @@ -62,9 +60,19 @@ def listing(dojo): ) -@dojo.route("//") +@dojo.route("//") +@dojo.route("///") @dojo_route -@check_challenge_visibility +def view_dojo_path(dojo, path): + module = DojoModules.query.filter_by(dojo=dojo, id=path).first() + if module: + return view_module(dojo, module) + elif path in dojo.pages: + return view_page(dojo, path) + else: + abort(404) + + def view_module(dojo, module): user = get_current_user() user_solves = set(solve.challenge_id for solve in ( @@ -108,3 +116,23 @@ def view_module(dojo, module): current_dojo_challenge=current_dojo_challenge, assessments=assessments, ) + + +def view_page(dojo, page): + if (dojo.path / f"{page}.md").is_file(): + content = render_markdown((dojo.path / f"{page}.md").read_text()) + return render_template("markdown.html", dojo=dojo, content=content) + + elif (dojo.path / page).is_dir(): + user = get_current_user() + if user and (dojo.path / page / f"{user.id}").is_file(): + path = (dojo.path / page / f"{user.id}").resolve() + return send_file(path, as_attachment=True) + elif user and (dojo.path / page / f"{user.id}.md").is_file(): + content = render_markdown((dojo.path / page / f"{user.id}.md").read_text()) + return render_template("markdown.html", dojo=dojo, content=content) + elif (dojo.path / page / "default.md").is_file(): + content 
= render_markdown((dojo.path / page / "default.md").read_text()) + return render_template("markdown.html", dojo=dojo, content=content) + + abort(404) diff --git a/dojo_plugin/utils/dojo.py b/dojo_plugin/utils/dojo.py index 0ec51003e..63c113e90 100644 --- a/dojo_plugin/utils/dojo.py +++ b/dojo_plugin/utils/dojo.py @@ -109,6 +109,7 @@ }, )], }], + Optional("pages", default=[]): [str], Optional("files", default=[]): [ { "type": "download", @@ -341,6 +342,9 @@ def import_ids(attrs, *datas): students = yaml.safe_load(students_yml_path.read_text()) dojo.course["students"] = students + if dojo_data.get("pages"): + dojo.pages = dojo_data["pages"] + return dojo diff --git a/dojo_theme/templates/dojo.html b/dojo_theme/templates/dojo.html index 1469711f0..b104f5da2 100644 --- a/dojo_theme/templates/dojo.html +++ b/dojo_theme/templates/dojo.html @@ -65,7 +65,7 @@
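{# Worked example, not part of the patch: combining the schema change in
   dojo_plugin/utils/dojo.py (Optional("pages", default=[]): [str]) with view_page()
   above, a dojo repository could opt into extra pages roughly as follows; the page
   and file names here are illustrative assumptions, not taken from the patch.

   dojo.yml (excerpt):
       pages:
         - syllabus
         - grades

   repository layout resolved by view_page():
       syllabus.md          static page, rendered as markdown
       grades/1234          per-user file for CTFd user id 1234, sent as a download
       grades/1234.md       or per-user markdown, rendered inline
       grades/default.md    fallback rendered when no per-user file exists

   A request for the page path ("syllabus") under the dojo first misses the module
   lookup in view_dojo_path() and, because "syllabus" is listed in dojo.pages, falls
   through to view_page(). #}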

Dojo Modules