Commit 806a298: Add comments to the route handlers

StJudeWasHere committed Mar 27, 2024
1 parent ad991ca commit 806a298

Showing 10 changed files with 44 additions and 123 deletions.
5 changes: 2 additions & 3 deletions internal/routes/app.go
@@ -17,10 +17,9 @@ type PageView struct {
Refresh bool
}

// NewApp initializes the template renderer and the session cookie.
// Returns a new HTTP application server.
// NewServer sets up the HTTP server routes and starts the HTTP server.
func NewServer(container *services.Container) {
// Static
// Handle static files
fileServer := http.FileServer(http.Dir("./web/static"))
http.Handle("/resources/", http.StripPrefix("/resources", fileServer))
http.Handle("/robots.txt", fileServer)
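The hunk above is truncated, so only the static file setup is visible. Below is a minimal standalone sketch of the pattern the new NewServer comment describes, registering routes and then starting the listener; the placeholder handler and the :9000 address are assumptions, not values taken from this repository.

package main

import (
	"log"
	"net/http"
)

func main() {
	// Serve static assets the same way the diff does: strip the public
	// "/resources/" prefix before looking files up under ./web/static.
	fileServer := http.FileServer(http.Dir("./web/static"))
	http.Handle("/resources/", http.StripPrefix("/resources", fileServer))
	http.Handle("/robots.txt", fileServer)

	// Register application routes and start the server. The handler and
	// address below are placeholders, not the project's actual values.
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("home"))
	})

	log.Fatal(http.ListenAndServe(":9000", nil))
}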
40 changes: 11 additions & 29 deletions internal/routes/crawl.go
@@ -27,34 +27,30 @@ type crawlHandler struct {
}

// handleCrawl handles the crawling of a project.
// It expects a query parameter "pid" containing the project ID to be crawled.
// It expects a query parameter "pid" containing the project id to be crawled.
// In case the project requires BasicAuth, it will redirect the user to the BasicAuth
// credentials URL. Otherwise, it starts a new crawler.
func (h *crawlHandler) handleCrawl(w http.ResponseWriter, r *http.Request) {
pid, err := strconv.Atoi(r.URL.Query().Get("pid"))
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

user, ok := h.CookieSession.GetUser(r.Context())
if !ok {
http.Redirect(w, r, "/signout", http.StatusSeeOther)

return
}

p, err := h.ProjectService.FindProject(pid, user.Id)
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

if p.BasicAuth {
http.Redirect(w, r, "/crawl/auth?id="+strconv.Itoa(pid), http.StatusSeeOther)

return
}

@@ -72,27 +68,26 @@ func (h *crawlHandler) handleCrawl(w http.ResponseWriter, r *http.Request) {
}

// handleStopCrawl handles the crawler stopping.
// It expects a query parameter "pid" containing the project ID that is being crawled.
// It expects a query parameter "pid" containing the project id that is being crawled.
// After making sure the user owns the project, it is stopped.
// In case the request is made via ajax with the X-Requested-With header it will return
// a json response, otherwise it will redirect the user back to the live crawl page.
func (h *crawlHandler) handleStopCrawl(w http.ResponseWriter, r *http.Request) {
pid, err := strconv.Atoi(r.URL.Query().Get("pid"))
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

user, ok := h.CookieSession.GetUser(r.Context())
if !ok {
http.Redirect(w, r, "/signout", http.StatusSeeOther)

return
}

p, err := h.ProjectService.FindProject(pid, user.Id)
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

@@ -103,48 +98,42 @@ func (h *crawlHandler) handleStopCrawl(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "hlication/json")
w.WriteHeader(http.StatusCreated)
json.NewEncoder(w).Encode(data)

return
}

http.Redirect(w, r, "/crawl/live?pid="+strconv.Itoa(pid), http.StatusSeeOther)
}
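As the comment above notes, handleStopCrawl answers with JSON only when the X-Requested-With header is present, and otherwise redirects back to the live crawl page. Here is a minimal Go client sketch of that ajax path; the /crawl/stop path, port, and project id are assumptions, since the route registration is not shown in this diff.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Assumed endpoint path and project id; adjust to the routes registered in NewServer.
	// A valid session cookie would also be required; it is omitted here.
	req, err := http.NewRequest(http.MethodGet, "http://localhost:9000/crawl/stop?pid=1", nil)
	if err != nil {
		panic(err)
	}

	// The handler only answers with JSON when this header is present,
	// otherwise it redirects back to the live crawl page.
	req.Header.Set("X-Requested-With", "XMLHttpRequest")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}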

// handleCrawlAuth handles the crawling of a project with BasicAuth.
// It expects a query parameter "pid" containing the project ID to be crawled.
// It expects a query parameter "pid" containing the project id to be crawled.
// A form will be presented to the user to input the BasicAuth credentials; once the
// form is submitted, a crawler with BasicAuth is started.
//
// The function handles both GET and POST HTTP methods.
// GET: Renders the auth form.
// POST: Processes the auth form data and starts the crawler.
func (h *crawlHandler) handleCrawlAuth(w http.ResponseWriter, r *http.Request) {
pid, err := strconv.Atoi(r.URL.Query().Get("pid"))
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

user, ok := h.CookieSession.GetUser(r.Context())
if !ok {
http.Redirect(w, r, "/signout", http.StatusSeeOther)

return
}

p, err := h.ProjectService.FindProject(pid, user.Id)
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

if r.Method == http.MethodPost {
err := r.ParseForm()
if err != nil {
http.Redirect(w, r, "/crawl/auth", http.StatusSeeOther)

return
}

@@ -175,39 +164,36 @@ func (h *crawlHandler) handleCrawlAuth(w http.ResponseWriter, r *http.Request) {
}

// handleCrawlLive handles the request for the live crawling of a project.
// It expects a query parameter "pid" containing the project ID to be crawled.
// It expects a query parameter "pid" containing the project id to be crawled.
// This handler renders a page that will connect via websockets to display the progress
// of the crawl.
func (h *crawlHandler) handleCrawlLive(w http.ResponseWriter, r *http.Request) {
pid, err := strconv.Atoi(r.URL.Query().Get("pid"))
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

user, ok := h.CookieSession.GetUser(r.Context())
if !ok {
http.Redirect(w, r, "/signout", http.StatusSeeOther)

return
}

pv, err := h.ProjectViewService.GetProjectView(pid, user.Id)
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

if pv.Crawl.IssuesEnd.Valid {
http.Redirect(w, r, "/dashboard?pid="+strconv.Itoa(pid), http.StatusSeeOther)

return
}

configURL, err := url.Parse(h.Config.HTTPServer.URL)
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

@@ -227,27 +213,24 @@ func (h *crawlHandler) handleCrawlLive(w http.ResponseWriter, r *http.Request) {
}

// handleCrawlWs handles the live crawling of a project using websockets.
// It expects a query parameter "pid" containing the project ID.
// It expects a query parameter "pid" containing the project id.
// It upgrades the connection to websockets and sends the crawler messages through it.
func (h *crawlHandler) handleCrawlWs(w http.ResponseWriter, r *http.Request) {
pid, err := strconv.Atoi(r.URL.Query().Get("pid"))
if err != nil {
w.WriteHeader(http.StatusBadRequest)

return
}

user, ok := h.CookieSession.GetUser(r.Context())
if !ok {
w.WriteHeader(http.StatusUnauthorized)

return
}

p, err := h.ProjectService.FindProject(pid, user.Id)
if err != nil {
w.WriteHeader(http.StatusUnauthorized)

return
}

@@ -257,15 +240,13 @@ func (h *crawlHandler) handleCrawlWs(w http.ResponseWriter, r *http.Request) {
WriteBufferSize: 1024,
CheckOrigin: func(r *http.Request) bool {
origin := r.Header.Get("Origin")

return origin == h.Config.HTTPServer.URL
},
}

conn, err := upgrader.Upgrade(w, r, nil)
if err != nil {
log.Println(err)

log.Printf("crawlWS upgrader error: %v", err)
return
}
defer conn.Close()
@@ -275,6 +256,7 @@ func (h *crawlHandler) handleCrawlWs(w http.ResponseWriter, r *http.Request) {

connLock := &sync.RWMutex{}

// Subscribe to the pubsub broker to keep track of the crawl's progress.
subscriber := h.PubSubBroker.NewSubscriber(fmt.Sprintf("crawl-%d", p.Id), func(i *models.Message) error {
pubsubMessage := i
wsMessage := struct {
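handleCrawlWs only upgrades connections whose Origin header matches the configured HTTPServer.URL, then streams the pubsub crawl messages over the socket. A hedged client sketch using gorilla/websocket follows; the host, port, and /crawl/ws path are assumptions, since the route registration is not part of this diff.

package main

import (
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

func main() {
	// Assumed values: the server URL configured in HTTPServer.URL and the
	// path the handleCrawlWs handler is registered under.
	origin := "http://localhost:9000"
	wsURL := "ws://localhost:9000/crawl/ws?pid=1"

	// CheckOrigin in the handler compares the Origin header against the
	// configured server URL, so the dial fails without it.
	header := http.Header{}
	header.Set("Origin", origin)

	conn, _, err := websocket.DefaultDialer.Dial(wsURL, header)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Print crawl progress messages as they are published by the pubsub broker.
	for {
		_, msg, err := conn.ReadMessage()
		if err != nil {
			log.Println("read:", err)
			return
		}
		log.Printf("ws message: %s", msg)
	}
}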
30 changes: 12 additions & 18 deletions internal/routes/dashboard.go
@@ -12,48 +12,42 @@ type dashboardHandler struct {
*services.Container
}

type DashboardView struct {
ProjectView *models.ProjectView
MediaChart *models.Chart
StatusChart *models.Chart
Crawls []models.Crawl
CanonicalCount *models.CanonicalCount
AltCount *models.AltCount
SchemeCount *models.SchemeCount
StatusCodeByDepth []models.StatusCodeByDepth
}

// handleDashboard handles the dashboard of a project.
// It expects a query parameter "pid" containing the project ID.
// handleDashboard handles the dashboard of a project with all the needed data to render
// the charts. It expects a query parameter "pid" containing the project id.
func (h *dashboardHandler) handleDashboard(w http.ResponseWriter, r *http.Request) {
user, ok := h.CookieSession.GetUser(r.Context())
if !ok {
http.Redirect(w, r, "/signout", http.StatusSeeOther)

return
}

pid, err := strconv.Atoi(r.URL.Query().Get("pid"))
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

pv, err := h.ProjectViewService.GetProjectView(pid, user.Id)
if err != nil {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

if pv.Crawl.TotalURLs == 0 {
http.Redirect(w, r, "/", http.StatusSeeOther)

return
}

data := DashboardView{
data := struct {
ProjectView *models.ProjectView
MediaChart *models.Chart
StatusChart *models.Chart
Crawls []models.Crawl
CanonicalCount *models.CanonicalCount
AltCount *models.AltCount
SchemeCount *models.SchemeCount
StatusCodeByDepth []models.StatusCodeByDepth
}{
ProjectView: pv,
MediaChart: h.ReportService.GetMediaCount(pv.Crawl.Id),
StatusChart: h.ReportService.GetStatusCount(pv.Crawl.Id),
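The dashboard change replaces the exported DashboardView type with an anonymous struct built at the point of use, which keeps the view model local to the handler at the cost of reusability. A small standalone illustration of that pattern with html/template; the template text and field names are invented for the example.

package main

import (
	"html/template"
	"os"
)

func main() {
	// An anonymous struct keeps the view data next to the only place it is
	// used, instead of exporting a dedicated view type.
	data := struct {
		Title     string
		TotalURLs int
	}{
		Title:     "Dashboard",
		TotalURLs: 42,
	}

	tmpl := template.Must(template.New("dashboard").Parse(
		"{{.Title}}: {{.TotalURLs}} URLs crawled\n"))
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}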
2 changes: 1 addition & 1 deletion internal/routes/explorer.go
@@ -15,7 +15,7 @@ type explorerHandler struct {
// handleExplorer handles the URL explorer request.
// It performs a search of pagereports based on the "term" parameter. In case the "term" parameter
// is empty, it loads all the pagereports.
// It expects a query parameter "pid" containing the project ID, the "p" parameter containing the current
// It expects a query parameter "pid" containing the project id, the "p" parameter containing the current
// page in the paginator, and the "term" parameter used to perform the pagereport search.
func (h *explorerHandler) handleExplorer(w http.ResponseWriter, r *http.Request) {
// Get user from the request's context
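handleExplorer reads three query parameters: pid, p, and term. A short sketch of building such a request URL with net/url; the /explorer path and host are assumptions, as the route registration is outside this diff.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Assumed path; the handler comment only documents the query parameters.
	u, err := url.Parse("http://localhost:9000/explorer")
	if err != nil {
		panic(err)
	}

	q := u.Query()
	q.Set("pid", "1")        // project id
	q.Set("p", "2")          // current page in the paginator
	q.Set("term", "sitemap") // pagereport search term; empty loads all pagereports
	u.RawQuery = q.Encode()

	fmt.Println(u.String()) // http://localhost:9000/explorer?p=2&pid=1&term=sitemap
}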