api/controllers/course.go: 13 changes (9 additions, 4 deletions)
@@ -2,6 +2,7 @@ package controllers

import (
"context"
"fmt"
"net/http"
"time"

@@ -302,15 +303,15 @@ func courseProfessor(flag string, c *gin.Context) {
if courseQuery, err = getQuery[schema.Course](flag, c); err != nil {
return
}

fmt.Println("getCourseQuery error:", courseQuery)
// determine the offset and limit for the pagination stage and delete the
// "offset" field from courseQuery
paginateMap, err := configs.GetAggregateLimit(&courseQuery, c)
if err != nil {
respond(c, http.StatusBadRequest, "Error: offset is not an integer", err.Error())
return
}
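// assumption from its use below: paginateMap holds integer pagination values
// parsed from the request's "offset" parameter, e.g. something like
//
//	map[string]interface{}{"latter_offset": 20, "limit": 20}
//
// where "latter_offset" feeds $skip and "limit" feeds $limit at the end of the pipeline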

fmt.Println("getCourseQuery error:", paginateMap)
// Pipeline to query the professors from the filtered courses
courseProfessorPipeline := mongo.Pipeline{
// filter the courses
@@ -344,15 +345,19 @@ func courseProfessor(flag string, c *gin.Context) {

// replace the courses with professors
bson.D{{Key: "$replaceWith", Value: "$professors"}},

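// the course-to-professor join above can emit the same professor once per
// matching course, so deduplicate by _id before paginating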
bson.D{{Key: "$group", Value: bson.D{ // Groups all documents by professords _id
{Key: "_id", Value: "$_id"},
{Key: "doc", Value: bson.D{{Key: "$first", Value: "$$ROOT"}}}, // Keeps first full document
}}},
bson.D{{Key: "$replaceWith", Value: "$doc"}},
// keep order deterministic between calls
bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}},

// paginate the professors
bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}},
bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
}
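// net effect of the pipeline: filter courses, surface their professors,
// drop duplicates, sort deterministically, then paginate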

fmt.Println("courseProfessorPipeline:", courseProfessorPipeline)
// perform aggregation on the pipeline
cursor, err := courseCollection.Aggregate(ctx, courseProfessorPipeline)
if err != nil {