diff --git a/api/controllers/controller_utils.go b/api/controllers/controller_utils.go
index fc59467..53cf54b 100644
--- a/api/controllers/controller_utils.go
+++ b/api/controllers/controller_utils.go
@@ -1,16 +1,20 @@
 package controllers

 import (
+    "context"
     "fmt"
     "log"
     "net/http"
+    "time"
+
     "github.com/UTDNebula/nebula-api/api/configs"
     "github.com/UTDNebula/nebula-api/api/schema"
     "github.com/getsentry/sentry-go"
     sentrygin "github.com/getsentry/sentry-go/gin"
     "github.com/gin-gonic/gin"
     "go.mongodb.org/mongo-driver/bson"
     "go.mongodb.org/mongo-driver/bson/primitive"
+    "go.mongodb.org/mongo-driver/mongo"
 )

 // Sets the API's response to a request, producing valid JSON given a status code and data.
@@ -82,3 +86,260 @@ func objectIDFromParam(c *gin.Context, paramName string) (*primitive.ObjectID, error) {
     }
     return &objectId, nil
 }
+
+// Creates a context with the specified timeout and returns both the context and its cancel function.
+// Common timeouts: 10s for standard queries, 30s for "all" operations.
+func createContext(timeout time.Duration) (context.Context, context.CancelFunc) {
+    return context.WithTimeout(context.Background(), timeout)
+}
+
+// Generic function to handle Find operations with pagination.
+// Reduces boilerplate for search endpoints by handling query building, finding, decoding, and responding.
+func findAndRespond[T any](c *gin.Context, collection *mongo.Collection, timeout time.Duration) {
+    ctx, cancel := createContext(timeout)
+    defer cancel()
+
+    var results []T
+
+    // Build query key-value pairs
+    query, err := getQuery[T]("Search", c)
+    if err != nil {
+        return // getQuery already responds with error
+    }
+
+    // Get pagination options
+    optionLimit, err := configs.GetOptionLimit(&query, c)
+    if err != nil {
+        respond(c, http.StatusBadRequest, "offset is not type integer", err.Error())
+        return
+    }
+
+    // Execute find query
+    cursor, err := collection.Find(ctx, query, optionLimit)
+    if err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Decode all results
+    if err = cursor.All(ctx, &results); err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Return results
+    respond(c, http.StatusOK, "success", results)
+}
+
+// Generic function to handle FindOne operations by ID.
+// Reduces boilerplate for ById endpoints by handling query building, finding one, decoding, and responding.
+func findOneByIdAndRespond[T any](c *gin.Context, collection *mongo.Collection, timeout time.Duration) {
+    ctx, cancel := createContext(timeout)
+    defer cancel()
+
+    var result T
+
+    // Parse object ID from parameter
+    query, err := getQuery[T]("ById", c)
+    if err != nil {
+        return // getQuery already responds with error
+    }
+
+    // Find and decode matching document
+    err = collection.FindOne(ctx, query).Decode(&result)
+    if err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Return result
+    respond(c, http.StatusOK, "success", result)
+}
+
+// Generic function to handle FindAll operations without filters.
+// Reduces boilerplate for "all" endpoints by finding all documents and responding.
+func findAllAndRespond[T any](c *gin.Context, collection *mongo.Collection, timeout time.Duration) {
+    ctx, cancel := createContext(timeout)
+    defer cancel()
+
+    var results []T
+
+    // Find all documents
+    cursor, err := collection.Find(ctx, bson.M{})
+    if err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Decode all results
+    if err = cursor.All(ctx, &results); err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Return results
+    respond(c, http.StatusOK, "success", results)
+}
+
+// Generic function to handle Aggregate operations.
+// Reduces boilerplate for aggregate endpoints by executing the pipeline, decoding, and responding.
+func aggregateAndRespond[T any](c *gin.Context, collection *mongo.Collection, pipeline mongo.Pipeline, timeout time.Duration) {
+    ctx, cancel := createContext(timeout)
+    defer cancel()
+
+    var results []T
+
+    // Execute aggregation pipeline
+    cursor, err := collection.Aggregate(ctx, pipeline)
+    if err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Decode all results
+    if err = cursor.All(ctx, &results); err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Return results
+    respond(c, http.StatusOK, "success", results)
+}
+
+// Builds a standard lookup stage for a MongoDB aggregation pipeline.
+// Used to join collections by matching a local field to a foreign field.
+func buildLookupStage(fromCollection, localField, foreignField, asField string) bson.D {
+    return bson.D{{Key: "$lookup", Value: bson.D{
+        {Key: "from", Value: fromCollection},
+        {Key: "localField", Value: localField},
+        {Key: "foreignField", Value: foreignField},
+        {Key: "as", Value: asField},
+    }}}
+}
+
+// Builds a standard unwind stage for a MongoDB aggregation pipeline.
+// Used to deconstruct an array field into separate documents.
+func buildUnwindStage(path string, preserveNullAndEmptyArrays bool) bson.D {
+    return bson.D{{Key: "$unwind", Value: bson.D{
+        {Key: "path", Value: path},
+        {Key: "preserveNullAndEmptyArrays", Value: preserveNullAndEmptyArrays},
+    }}}
+}
+
+// Builds a standard project stage for a MongoDB aggregation pipeline.
+// Used to include/exclude fields or compute new fields.
+func buildProjectStage(fields bson.D) bson.D {
+    return bson.D{{Key: "$project", Value: fields}}
+}
+
+// Builds a standard replaceWith stage for a MongoDB aggregation pipeline.
+// Used to replace the root document with the document at the given field path.
+func buildReplaceWithStage(newRoot string) bson.D {
+    return bson.D{{Key: "$replaceWith", Value: newRoot}}
+}
+
+// Builds a standard sort stage for a MongoDB aggregation pipeline.
+// Used to order documents by the specified fields.
+func buildSortStage(sortFields bson.D) bson.D {
+    return bson.D{{Key: "$sort", Value: sortFields}}
+}
+
+// Builds standard pagination stages (skip and limit) for a MongoDB aggregation pipeline.
+// Returns two stages: skip and limit.
+func buildPaginationStages(offset, limit interface{}) []bson.D {
+    return []bson.D{
+        {{Key: "$skip", Value: offset}},
+        {{Key: "$limit", Value: limit}},
+    }
+}
+
+// PipelineConfig holds configuration for building relation query pipelines.
+type PipelineConfig struct {
+    MatchQuery         bson.M
+    PaginateMap        map[string]interface{}
+    LookupFrom         string
+    LookupLocalField   string
+    LookupForeignField string
+    LookupAs           string
+    UnwindPath         string
+    ProjectFields      bson.D
+    ReplaceWithField   string
+    NeedsPagination    bool
+    NeedsProjectStage  bool
+}
+
+// Builds a standard pipeline for querying related entities.
+// This handles the common pattern of: match -> paginate former -> lookup -> [project] -> unwind -> replace -> sort -> paginate latter.
+func buildRelationPipeline(config PipelineConfig) mongo.Pipeline {
+    pipeline := mongo.Pipeline{
+        // Filter the source entities
+        bson.D{{Key: "$match", Value: config.MatchQuery}},
+    }
+
+    // Paginate the source entities before looking up related entities
+    if config.NeedsPagination {
+        formerStages := buildPaginationStages(config.PaginateMap["former_offset"], config.PaginateMap["limit"])
+        pipeline = append(pipeline, formerStages...)
+    }
+
+    // Lookup the related entities
+    pipeline = append(pipeline, buildLookupStage(
+        config.LookupFrom,
+        config.LookupLocalField,
+        config.LookupForeignField,
+        config.LookupAs,
+    ))
+
+    // Optionally project to extract nested fields
+    if config.NeedsProjectStage {
+        pipeline = append(pipeline, buildProjectStage(config.ProjectFields))
+    }
+
+    // Unwind the related entities
+    pipeline = append(pipeline, buildUnwindStage(config.UnwindPath, false))
+
+    // Replace the root document with the related entity
+    pipeline = append(pipeline, buildReplaceWithStage(config.ReplaceWithField))
+
+    // Keep order deterministic between calls
+    pipeline = append(pipeline, buildSortStage(bson.D{{Key: "_id", Value: 1}}))
+
+    // Paginate the related entities
+    if config.NeedsPagination {
+        latterStages := buildPaginationStages(config.PaginateMap["latter_offset"], config.PaginateMap["limit"])
+        pipeline = append(pipeline, latterStages...)
+    }
+
+    return pipeline
+}
+
+// Generic function to handle relation queries (e.g., getting sections of courses, courses of professors).
+// Reduces boilerplate by handling query building, pagination, aggregation, and responding.
+func queryRelatedEntitiesAndRespond[TResult any](
+    c *gin.Context,
+    collection *mongo.Collection,
+    config PipelineConfig,
+    timeout time.Duration,
+) {
+    ctx, cancel := createContext(timeout)
+    defer cancel()
+
+    var results []TResult
+
+    // Build and execute pipeline
+    pipeline := buildRelationPipeline(config)
+    cursor, err := collection.Aggregate(ctx, pipeline)
+    if err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Decode all results
+    if err = cursor.All(ctx, &results); err != nil {
+        respondWithInternalError(c, err)
+        return
+    }
+
+    // Return results
+    respond(c, http.StatusOK, "success", results)
+}
\ No newline at end of file
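Reviewer note: for orientation before the call sites below, here is a minimal sketch of how a new endpoint could compose these helpers directly. It is not part of the patch; the handler name and the "class_level" filter are hypothetical, while aggregateAndRespond, the stage builders, schema.Section, and courseCollection are defined in this patch or elsewhere in the controllers package.

    // Hypothetical endpoint (illustration only): sections of honors courses.
    func honorsSections(c *gin.Context) {
        pipeline := mongo.Pipeline{
            // "class_level" is an assumed field name, used purely for illustration
            bson.D{{Key: "$match", Value: bson.M{"class_level": "Honors"}}},
            buildLookupStage("sections", "sections", "_id", "sections"),
            buildUnwindStage("$sections", false),
            buildReplaceWithStage("$sections"),
            buildSortStage(bson.D{{Key: "_id", Value: 1}}),
        }
        aggregateAndRespond[schema.Section](c, courseCollection, pipeline, 10*time.Second)
    }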
diff --git a/api/controllers/course.go b/api/controllers/course.go
index 4264bbc..0e22b8a 100644
--- a/api/controllers/course.go
+++ b/api/controllers/course.go
@@ -39,41 +39,7 @@ var courseCollection *mongo.Collection = configs.GetCollection("courses")
 // @Failure 500 {object} schema.APIResponse[string] "A string describing the error"
 // @Failure 400 {object} schema.APIResponse[string] "A string describing the error"
 func CourseSearch(c *gin.Context) {
-    //name := c.Query("name")              // value of specific query parameter: string
-    //queryParams := c.Request.URL.Query() // map of all query params: map[string][]string
-
-    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
-    defer cancel()
-
-    var courses []schema.Course
-
-    // build query key value pairs (only one value per key)
-    query, err := getQuery[schema.Course]("Search", c)
-    if err != nil {
-        return
-    }
-
-    optionLimit, err := configs.GetOptionLimit(&query, c)
-    if err != nil {
-        respond(c, http.StatusBadRequest, "offset is not type integer", err.Error())
-        return
-    }
-
-    // get cursor for query results
-    cursor, err := courseCollection.Find(ctx, query, optionLimit)
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // retrieve and parse all valid documents
-    if err = cursor.All(ctx, &courses); err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // return result
-    respond(c, http.StatusOK, "success", courses)
+    findAndRespond[schema.Course](c, courseCollection, 10*time.Second)
 }

 // @Id courseById
@@ -85,26 +51,7 @@ func CourseSearch(c *gin.Context) {
 // @Success 200 {object} schema.APIResponse[schema.Course] "A course"
 // @Failure 500 {object} schema.APIResponse[string] "A string describing the error"
 func CourseById(c *gin.Context) {
-    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
-    defer cancel()
-
-    var course schema.Course
-
-    // parse object id from id parameter
-    query, err := getQuery[schema.Course]("ById", c)
-    if err != nil {
-        return
-    }
-
-    // find and parse matching course
-    err = courseCollection.FindOne(ctx, query).Decode(&course)
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // return result
-    respond(c, http.StatusOK, "success", course)
+    findOneByIdAndRespond[schema.Course](c, courseCollection, 10*time.Second)
 }

 // @Id courseAll
@@ -115,27 +62,7 @@ func CourseById(c *gin.Context) {
 // @Success 200 {object} schema.APIResponse[[]schema.Course] "All courses"
 // @Failure 500 {object} schema.APIResponse[string] "A string describing the error"
 func CourseAll(c *gin.Context) {
-    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
-
-    var courses []schema.Course
-
-    defer cancel()
-
-    cursor, err := courseCollection.Find(ctx, bson.M{})
-
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // retrieve and parse all valid documents
-    if err = cursor.All(ctx, &courses); err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // return result
-    respond(c, http.StatusOK, "success", courses)
+    findAllAndRespond[schema.Course](c, courseCollection, 30*time.Second)
 }

 // @Id courseSectionSearch
@@ -183,72 +110,38 @@ func CourseSectionById() gin.HandlerFunc {

 // get the sections of the courses, filters depending on the flag
 func courseSection(flag string, c *gin.Context) {
-    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
-    defer cancel()
-
-    var courseSections []schema.Section // the list of sections of the filtered courses
-    var courseQuery bson.M              // query of the courses (or the single course)
-    var err error                       // error
-
-    // determine the course query
-    courseQuery, err = getQuery[schema.Course](flag, c)
+    // Determine the course query
+    courseQuery, err := getQuery[schema.Course](flag, c)
     if err != nil {
         return
     }

-    // determine the offset and limit for pagination stage & delete "offset" fields in professorQuery
+    // Determine the offset and limit for pagination stage & delete "offset" fields in courseQuery
     paginateMap, err := configs.GetAggregateLimit(&courseQuery, c)
     if err != nil {
         respond(c, http.StatusBadRequest, "Error offset is not type integer", err.Error())
         return
     }

-    // pipeline to query the sections from the filtered courses
-    courseSectionPipeline := mongo.Pipeline{
-        // filter the courses
-        bson.D{{Key: "$match", Value: courseQuery}},
-
-        // paginate the courses before pulling the sections from thoses courses
-        bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // skip to the specified offset
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},        // limit to the specified number of courses
-
-        // lookup the sections of the courses
-        bson.D{{Key: "$lookup", Value: bson.D{
-            {Key: "from", Value: "sections"},
-            {Key: "localField", Value: "sections"},
-            {Key: "foreignField", Value: "_id"},
-            {Key: "as", Value: "sections"},
-        }}},
-
-        // unwind the sections of the courses
-        bson.D{{Key: "$unwind", Value: bson.D{
-            {Key: "path", Value: "$sections"},
-            {Key: "preserveNullAndEmptyArrays", Value: false}, // avoid course documents that can't be replaced
-        }}},
-
-        // replace the courses with sections
-        bson.D{{Key: "$replaceWith", Value: "$sections"}},
-
-        // keep order deterministic between calls
-        bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}},
-
-        // paginate the sections
-        bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}},
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
+    // Configure and execute the pipeline to query sections from filtered courses
+    config := PipelineConfig{
+        MatchQuery: courseQuery,
+        PaginateMap: map[string]interface{}{
+            "former_offset": paginateMap["former_offset"],
+            "latter_offset": paginateMap["latter_offset"],
+            "limit":         paginateMap["limit"],
+        },
+        LookupFrom:         "sections",
+        LookupLocalField:   "sections",
+        LookupForeignField: "_id",
+        LookupAs:           "sections",
+        UnwindPath:         "$sections",
+        ReplaceWithField:   "$sections",
+        NeedsPagination:    true,
+        NeedsProjectStage:  false,
     }

-    // perform aggregation on the pipeline
-    cursor, err := courseCollection.Aggregate(ctx, courseSectionPipeline)
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-    if err = cursor.All(ctx, &courseSections); err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    respond(c, http.StatusOK, "success", courseSections)
+    queryRelatedEntitiesAndRespond[schema.Section](c, courseCollection, config, 10*time.Second)
 }

 // @Id courseProfessorSearch
@@ -303,8 +196,7 @@ func courseProfessor(flag string, c *gin.Context) {
         return
     }

-    // determine the offset and limit for pagination stage and delete
-    // "offset" field in professorQuery
+    // Determine the offset and limit for pagination stage and delete "offset" field in courseQuery
     paginateMap, err := configs.GetAggregateLimit(&courseQuery, c)
     if err != nil {
         respond(c, http.StatusBadRequest, "Error offset is not type integer", err.Error())
@@ -312,55 +204,44 @@ func courseProfessor(flag string, c *gin.Context) {
     }

     // Pipeline to query the professors from the filtered courses
+    // This is more complex than standard relation queries because it requires two lookups:
+    // courses -> sections -> professors
     courseProfessorPipeline := mongo.Pipeline{
-        // filter the courses
+        // Filter the courses
         bson.D{{Key: "$match", Value: courseQuery}},
+    }

-        // paginate the courses before pulling the sections from those courses
-        bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}},
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
+    // Paginate the courses before pulling the sections from those courses
+    formerStages := buildPaginationStages(paginateMap["former_offset"], paginateMap["limit"])
+    courseProfessorPipeline = append(courseProfessorPipeline, formerStages...)

-        // lookup the sections of the courses
-        bson.D{{Key: "$lookup", Value: bson.D{
-            {Key: "from", Value: "sections"},
-            {Key: "localField", Value: "sections"},
-            {Key: "foreignField", Value: "_id"},
-            {Key: "as", Value: "sections"},
-        }}},
+    // Lookup the sections of the courses
+    courseProfessorPipeline = append(courseProfessorPipeline, buildLookupStage("sections", "sections", "_id", "sections"))

-        // lookup the professors of the sections
-        bson.D{{Key: "$lookup", Value: bson.D{
-            {Key: "from", Value: "professors"},
-            {Key: "localField", Value: "sections.professors"},
-            {Key: "foreignField", Value: "_id"},
-            {Key: "as", Value: "professors"},
-        }}},
+    // Lookup the professors of the sections
+    courseProfessorPipeline = append(courseProfessorPipeline, buildLookupStage("professors", "sections.professors", "_id", "professors"))

-        // unwind the professors of the sections
-        bson.D{{Key: "$unwind", Value: bson.D{
-            {Key: "path", Value: "$professors"},
-            {Key: "preserveNullAndEmptyArrays", Value: false}, // avoid course documents that can't be replaced
-        }}},
+    // Unwind the professors of the sections
+    courseProfessorPipeline = append(courseProfessorPipeline, buildUnwindStage("$professors", false))

-        // replace the courses with professors
-        bson.D{{Key: "$replaceWith", Value: "$professors"}},
+    // Replace the courses with professors
+    courseProfessorPipeline = append(courseProfessorPipeline, buildReplaceWithStage("$professors"))

-        // keep order deterministic between calls
-        bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}},
+    // Keep order deterministic between calls
+    courseProfessorPipeline = append(courseProfessorPipeline, buildSortStage(bson.D{{Key: "_id", Value: 1}}))

-        // paginate the professors
-        bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}},
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
-    }
+    // Paginate the professors
+    latterStages := buildPaginationStages(paginateMap["latter_offset"], paginateMap["limit"])
+    courseProfessorPipeline = append(courseProfessorPipeline, latterStages...)

-    // perform aggregation on the pipeline
+    // Perform aggregation on the pipeline
     cursor, err := courseCollection.Aggregate(ctx, courseProfessorPipeline)
     if err != nil {
-        // return error for any aggregation problem
+        // Return error for any aggregation problem
         respondWithInternalError(c, err)
         return
     }

-    // parse the array of professors of the course
+    // Parse the array of professors of the course
     if err = cursor.All(ctx, &courseProfessors); err != nil {
         panic(err)
     }
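Reviewer note: as a check on the courseSection refactor above, expanding buildRelationPipeline under the config shown in that hunk should yield, stage for stage, the inline pipeline this patch removes. A sketch of the expected expansion (it assumes the courseQuery and paginateMap values from the hunk; it is not code in the patch):

    pipeline := mongo.Pipeline{
        bson.D{{Key: "$match", Value: courseQuery}},                 // filter the courses
        bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // paginate the courses
        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
        buildLookupStage("sections", "sections", "_id", "sections"), // join in the sections
        buildUnwindStage("$sections", false),                        // one document per section
        buildReplaceWithStage("$sections"),                          // promote each section to root
        buildSortStage(bson.D{{Key: "_id", Value: 1}}),              // deterministic order
        bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}}, // paginate the sections
        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
    }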
diff --git a/api/controllers/professor.go b/api/controllers/professor.go
index 3e51576..5ea9c1a 100644
--- a/api/controllers/professor.go
+++ b/api/controllers/professor.go
@@ -46,41 +46,7 @@ var professorCollection *mongo.Collection = configs.GetCollection("professors")
 // @Failure 500 {object} schema.APIResponse[string] "A string describing the error"
 // @Failure 400 {object} schema.APIResponse[string] "A string describing the error"
 func ProfessorSearch(c *gin.Context) {
-    //name := c.Query("name")              // value of specific query parameter: string
-    //queryParams := c.Request.URL.Query() // map of all query params: map[string][]string
-
-    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
-    defer cancel()
-
-    var professors []schema.Professor
-
-    // build query key value pairs (only one value per key)
-    query, err := getQuery[schema.Professor]("Search", c)
-    if err != nil {
-        return
-    }
-
-    optionLimit, err := configs.GetOptionLimit(&query, c)
-    if err != nil {
-        respond(c, http.StatusBadRequest, "offset is not type integer", err.Error())
-        return
-    }
-
-    // get cursor for query results
-    cursor, err := professorCollection.Find(ctx, query, optionLimit)
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // retrieve and parse all valid documents
-    if err = cursor.All(ctx, &professors); err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // return result
-    respond(c, http.StatusOK, "success", professors)
+    findAndRespond[schema.Professor](c, professorCollection, 10*time.Second)
 }

 // @Id professorById
@@ -93,26 +59,7 @@ func ProfessorSearch(c *gin.Context) {
 // @Failure 500 {object} schema.APIResponse[string] "A string describing the error"
 // @Failure 400 {object} schema.APIResponse[string] "A string describing the error"
 func ProfessorById(c *gin.Context) {
-    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
-    defer cancel()
-
-    var professor schema.Professor
-
-    // parse object id from id parameter
-    query, err := getQuery[schema.Professor]("ById", c)
-    if err != nil {
-        return
-    }
-
-    // find and parse matching professor
-    err = professorCollection.FindOne(ctx, query).Decode(&professor)
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // return result
-    respond(c, http.StatusOK, "success", professor)
+    findOneByIdAndRespond[schema.Professor](c, professorCollection, 10*time.Second)
 }

 // @Id professorAll
@@ -123,26 +70,7 @@ func ProfessorById(c *gin.Context) {
 // @Success 200 {object} schema.APIResponse[[]schema.Professor] "All professors"
 // @Failure 500 {object} schema.APIResponse[string] "A string describing the error"
 func ProfessorAll(c *gin.Context) {
-    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
-    defer cancel()
-
-    var professors []schema.Professor
-
-    cursor, err := professorCollection.Find(ctx, bson.M{})
-
-    if err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // retrieve and parse all valid documents
-    if err = cursor.All(ctx, &professors); err != nil {
-        respondWithInternalError(c, err)
-        return
-    }
-
-    // return result
-    respond(c, http.StatusOK, "success", professors)
+    findAllAndRespond[schema.Professor](c, professorCollection, 30*time.Second)
 }

 // @Id professorCourseSearch
@@ -206,14 +134,13 @@ func professorCourse(flag string, c *gin.Context) {
     var professorQuery bson.M // query filter the professor
     var err error

-    // determine the professor's query
+    // Determine the professor's query
     professorQuery, err = getQuery[schema.Professor](flag, c)
     if err != nil {
         return
     }

-    // determine the offset and limit for pagination stage
-    // and delete "offset" field in professorQuery
+    // Determine the offset and limit for pagination stage and delete "offset" field in professorQuery
     paginateMap, err := configs.GetAggregateLimit(&professorQuery, c)
     if err != nil {
         respond(c, http.StatusBadRequest, "offset is not type integer", err.Error())
@@ -221,54 +148,42 @@ func professorCourse(flag string, c *gin.Context) {
     }

     // Pipeline to query the courses from the filtered professors (or a single professor)
+    // This is more complex than standard relation queries because it requires extracting courses from sections
     professorCoursePipeline := mongo.Pipeline{
-        // filter the professors
+        // Filter the professors
         bson.D{{Key: "$match", Value: professorQuery}},
+    }

-        // paginate the professors before pulling the courses from those professor
-        bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // skip to the specified offset
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},        // limit to the specified number of professors
+    // Paginate the professors before pulling the courses from those professors
+    formerStages := buildPaginationStages(paginateMap["former_offset"], paginateMap["limit"])
+    professorCoursePipeline = append(professorCoursePipeline, formerStages...)

-        // lookup the array of sections from sections collection
-        bson.D{{Key: "$lookup", Value: bson.D{
-            {Key: "from", Value: "sections"},
-            {Key: "localField", Value: "sections"},
-            {Key: "foreignField", Value: "_id"},
-            {Key: "as", Value: "sections"},
-        }}},
+    // Lookup the array of sections from the sections collection
+    professorCoursePipeline = append(professorCoursePipeline, buildLookupStage("sections", "sections", "_id", "sections"))

-        // project the courses referenced by each section in the array
-        bson.D{{Key: "$project", Value: bson.D{{Key: "courses", Value: "$sections.course_reference"}}}},
+    // Project the courses referenced by each section in the array
+    professorCoursePipeline = append(professorCoursePipeline, buildProjectStage(bson.D{{Key: "courses", Value: "$sections.course_reference"}}))

-        // lookup the array of courses from coures collection
-        bson.D{{Key: "$lookup", Value: bson.D{
-            {Key: "from", Value: "courses"},
-            {Key: "localField", Value: "courses"},
-            {Key: "foreignField", Value: "_id"},
-            {Key: "as", Value: "courses"},
-        }}},
+    // Lookup the array of courses from the courses collection
+    professorCoursePipeline = append(professorCoursePipeline, buildLookupStage("courses", "courses", "_id", "courses"))

-        // unwind the courses
-        bson.D{{Key: "$unwind", Value: bson.D{
-            {Key: "path", Value: "$courses"},
-            {Key: "preserveNullAndEmptyArrays", Value: false}, // to avoid the professor documents that can't be replaced
-        }}},
+    // Unwind the courses
+    professorCoursePipeline = append(professorCoursePipeline, buildUnwindStage("$courses", false))

-        // replace the combination of ids and courses with the courses entirely
-        bson.D{{Key: "$replaceWith", Value: "$courses"}},
+    // Replace the combination of ids and courses with the courses entirely
+    professorCoursePipeline = append(professorCoursePipeline, buildReplaceWithStage("$courses"))

-        // keep order deterministic between calls
-        bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}},
+    // Keep order deterministic between calls
+    professorCoursePipeline = append(professorCoursePipeline, buildSortStage(bson.D{{Key: "_id", Value: 1}}))

-        // paginate the courses
-        bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}},
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
-    }
+    // Paginate the courses
+    latterStages := buildPaginationStages(paginateMap["latter_offset"], paginateMap["limit"])
+    professorCoursePipeline = append(professorCoursePipeline, latterStages...)

-    // Perform aggreration on the pipeline
+    // Perform aggregation on the pipeline
     cursor, err := professorCollection.Aggregate(ctx, professorCoursePipeline)
     if err != nil {
-        // return the error with there's something wrong with the aggregation
+        // Return the error if there's something wrong with the aggregation
         respondWithInternalError(c, err)
         return
     }
@@ -339,60 +254,52 @@ func professorSection(flag string, c *gin.Context) {
     var professorQuery bson.M // query filter the professor
     var err error

-    // determine the professor's query
+    // Determine the professor's query
     professorQuery, err = getQuery[schema.Professor](flag, c)
     if err != nil {
         return
     }

-    // determine the offset and limit for pagination stage
+    // Determine the offset and limit for pagination stage
     paginateMap, err := configs.GetAggregateLimit(&professorQuery, c)
     if err != nil {
         respond(c, http.StatusBadRequest, "offset is not type integer", err.Error())
         return
     }

-    // Pipeline to query the courses from the filtered professors (or a single professor)
+    // Pipeline to query the sections from the filtered professors (or a single professor)
     professorSectionPipeline := mongo.Pipeline{
-        // filter the professors
+        // Filter the professors
         bson.D{{Key: "$match", Value: professorQuery}},
+    }

-        // paginate the professors before pulling the courses from those professor
-        bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, // skip to the specified offset
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},        // limit to the specified number of professors
+    // Paginate the professors before pulling the sections from those professors
+    formerStages := buildPaginationStages(paginateMap["former_offset"], paginateMap["limit"])
+    professorSectionPipeline = append(professorSectionPipeline, formerStages...)

-        // lookup the array of sections from sections collection
-        bson.D{{Key: "$lookup", Value: bson.D{
-            {Key: "from", Value: "sections"},
-            {Key: "localField", Value: "sections"},
-            {Key: "foreignField", Value: "_id"},
-            {Key: "as", Value: "sections"},
-        }}},
+    // Lookup the array of sections from the sections collection
+    professorSectionPipeline = append(professorSectionPipeline, buildLookupStage("sections", "sections", "_id", "sections"))

-        // project the sections
-        bson.D{{Key: "$project", Value: bson.D{{Key: "sections", Value: "$sections"}}}},
+    // Project the sections
+    professorSectionPipeline = append(professorSectionPipeline, buildProjectStage(bson.D{{Key: "sections", Value: "$sections"}}))

-        // unwind the sections
-        bson.D{{Key: "$unwind", Value: bson.D{
-            {Key: "path", Value: "$sections"},
-            {Key: "preserveNullAndEmptyArrays", Value: false}, // to avoid the professor documents that can't be replaced
-        }}},
+    // Unwind the sections
+    professorSectionPipeline = append(professorSectionPipeline, buildUnwindStage("$sections", false))

-        // replace the combination of ids and sections with the sections entirely
-        bson.D{{Key: "$replaceWith", Value: "$sections"}},
+    // Replace the combination of ids and sections with the sections entirely
+    professorSectionPipeline = append(professorSectionPipeline, buildReplaceWithStage("$sections"))

-        // keep order deterministic between calls
-        bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}},
+    // Keep order deterministic between calls
+    professorSectionPipeline = append(professorSectionPipeline, buildSortStage(bson.D{{Key: "_id", Value: 1}}))

-        // paginate the sections
-        bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}},
-        bson.D{{Key: "$limit", Value: paginateMap["limit"]}},
-    }
+    // Paginate the sections
+    latterStages := buildPaginationStages(paginateMap["latter_offset"], paginateMap["limit"])
+    professorSectionPipeline = append(professorSectionPipeline, latterStages...)

-    // Perform aggreration on the pipeline
+    // Perform aggregation on the pipeline
     cursor, err := professorCollection.Aggregate(ctx, professorSectionPipeline)
     if err != nil {
-        // return the error with there's something wrong with the aggregation
+        // Return the error if there's something wrong with the aggregation
        respondWithInternalError(c, err)
         return
     }
@@ -471,4 +378,4 @@ func TrendsProfessorSectionSearch(c *gin.Context) {
     }

     respond(c, http.StatusOK, "success", results)
-}
+}
\ No newline at end of file
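Reviewer note for a possible follow-up: professorSection's hand-built pipeline (match -> paginate -> lookup -> project -> unwind -> replace -> sort -> paginate) matches buildRelationPipeline's stage order exactly, so it could likely collapse to the generic helper as well. A sketch of that alternative, untested and deliberately not part of this patch:

    config := PipelineConfig{
        MatchQuery: professorQuery,
        PaginateMap: map[string]interface{}{
            "former_offset": paginateMap["former_offset"],
            "latter_offset": paginateMap["latter_offset"],
            "limit":         paginateMap["limit"],
        },
        LookupFrom:         "sections",
        LookupLocalField:   "sections",
        LookupForeignField: "_id",
        LookupAs:           "sections",
        UnwindPath:         "$sections",
        ProjectFields:      bson.D{{Key: "sections", Value: "$sections"}},
        ReplaceWithField:   "$sections",
        NeedsPagination:    true,
        NeedsProjectStage:  true,
    }
    queryRelatedEntitiesAndRespond[schema.Section](c, professorCollection, config, 10*time.Second)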
bson.D{{Key: "$match", Value: sectionQuery}}, + } - // paginate the sections before pulling courses from those sections - bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, - bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, + // Paginate the sections before pulling courses from those sections + formerStages := buildPaginationStages(paginateMap["former_offset"], paginateMap["limit"]) + sectionCoursePipeline = append(sectionCoursePipeline, formerStages...) - // lookup the course referenced by sections from the course collection - bson.D{{Key: "$lookup", Value: bson.D{ - {Key: "from", Value: "courses"}, - {Key: "localField", Value: "course_reference"}, - {Key: "foreignField", Value: "_id"}, - {Key: "as", Value: "course_reference"}, - }}}, + // Lookup the course referenced by sections from the course collection + sectionCoursePipeline = append(sectionCoursePipeline, buildLookupStage("courses", "course_reference", "_id", "course_reference")) - // project to remove every other fields except for courses - bson.D{{Key: "$project", Value: bson.D{{Key: "courses", Value: "$course_reference"}}}}, + // Project to remove every other field except for courses + sectionCoursePipeline = append(sectionCoursePipeline, buildProjectStage(bson.D{{Key: "courses", Value: "$course_reference"}})) - // unwind the courses - bson.D{{Key: "$unwind", Value: bson.D{ - {Key: "path", Value: "$courses"}, - {Key: "preserveNullAndEmptyArrays", Value: false}, - }}}, + // Unwind the courses + sectionCoursePipeline = append(sectionCoursePipeline, buildUnwindStage("$courses", false)) - // replace the combinations of id and course with courses entirely - bson.D{{Key: "$replaceWith", Value: "$courses"}}, + // Replace the combinations of id and course with courses entirely + sectionCoursePipeline = append(sectionCoursePipeline, buildReplaceWithStage("$courses")) - // keep order deterministic between calls - bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}}, + // Keep order deterministic between calls + sectionCoursePipeline = append(sectionCoursePipeline, buildSortStage(bson.D{{Key: "_id", Value: 1}})) - // paginate the courses - bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}}, - bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, - } + // Paginate the courses + latterStages := buildPaginationStages(paginateMap["latter_offset"], paginateMap["limit"]) + sectionCoursePipeline = append(sectionCoursePipeline, latterStages...) cursor, err := sectionCollection.Aggregate(ctx, sectionCoursePipeline) if err != nil { @@ -299,7 +238,7 @@ func SectionProfessorById() gin.HandlerFunc { } } -// Get an array of professors from sections, +// Get an array of professors from sections func sectionProfessor(flag string, c *gin.Context) { ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() @@ -317,34 +256,34 @@ func sectionProfessor(flag string, c *gin.Context) { return } - // pipeline to query an array of professors from filtered sections + // Pipeline to query an array of professors from filtered sections sectionProfessorPipeline := mongo.Pipeline{ + // Filter the sections bson.D{{Key: "$match", Value: sectionQuery}}, + } - bson.D{{Key: "$skip", Value: paginateMap["former_offset"]}}, - bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, + // Paginate the sections before pulling professors from those sections + formerStages := buildPaginationStages(paginateMap["former_offset"], paginateMap["limit"]) + sectionProfessorPipeline = append(sectionProfessorPipeline, formerStages...) 
- bson.D{{Key: "$lookup", Value: bson.D{ - {Key: "from", Value: "professors"}, - {Key: "localField", Value: "professors"}, - {Key: "foreignField", Value: "_id"}, - {Key: "as", Value: "professors"}, - }}}, + // Lookup the professors from the professors collection + sectionProfessorPipeline = append(sectionProfessorPipeline, buildLookupStage("professors", "professors", "_id", "professors")) - bson.D{{Key: "$project", Value: bson.D{{Key: "professors", Value: "$professors"}}}}, + // Project to extract professors + sectionProfessorPipeline = append(sectionProfessorPipeline, buildProjectStage(bson.D{{Key: "professors", Value: "$professors"}})) - bson.D{{Key: "$unwind", Value: bson.D{ - {Key: "path", Value: "$professors"}, - {Key: "preserveNullAndEmptyArrays", Value: false}, - }}}, + // Unwind the professors + sectionProfessorPipeline = append(sectionProfessorPipeline, buildUnwindStage("$professors", false)) - bson.D{{Key: "$replaceWith", Value: "$professors"}}, + // Replace the root with professors + sectionProfessorPipeline = append(sectionProfessorPipeline, buildReplaceWithStage("$professors")) - bson.D{{Key: "$sort", Value: bson.D{{Key: "_id", Value: 1}}}}, + // Keep order deterministic between calls + sectionProfessorPipeline = append(sectionProfessorPipeline, buildSortStage(bson.D{{Key: "_id", Value: 1}})) - bson.D{{Key: "$skip", Value: paginateMap["latter_offset"]}}, - bson.D{{Key: "$limit", Value: paginateMap["limit"]}}, - } + // Paginate the professors + latterStages := buildPaginationStages(paginateMap["latter_offset"], paginateMap["limit"]) + sectionProfessorPipeline = append(sectionProfessorPipeline, latterStages...) cursor, err := sectionCollection.Aggregate(ctx, sectionProfessorPipeline) if err != nil { @@ -352,7 +291,7 @@ func sectionProfessor(flag string, c *gin.Context) { return } - // Parse the array of courses + // Parse the array of professors if err = cursor.All(ctx, §ionProfessors); err != nil { respondWithInternalError(c, err) return @@ -360,4 +299,4 @@ func sectionProfessor(flag string, c *gin.Context) { respond(c, http.StatusOK, "success", sectionProfessors) -} +} \ No newline at end of file