diff --git a/src/envVariables.js b/src/envVariables.js
index cd382e7cac6856ec3f10fd7aa342c421ab535f53..f87bf727d4041962f3fdf753682dbc28de8b3745 100644
--- a/src/envVariables.js
+++ b/src/envVariables.js
@@ -34,6 +34,10 @@ const envVariables = {
         host: process.env.telemetry_service_host,
         endpoint: process.env.telemetry_service_endpoint,
         method: 'POST'
+    },
+    SUNBIRD_GOOGLE_SERVICE_ACCOUNT_CREDENTIAL: {
+        client_email: process.env.sunbird_google_oauth_client_email,
+        private_key: process.env.sunbird_google_oauth_private_key
     }
 }
 module.exports = envVariables;
diff --git a/src/helpers/csv-helper-util.js b/src/helpers/csv-helper-util.js
new file mode 100644
index 0000000000000000000000000000000000000000..7fe353c28e19084e5ac18477ac820a31e667ae15
--- /dev/null
+++ b/src/helpers/csv-helper-util.js
@@ -0,0 +1,252 @@
+const _ = require("lodash");
+const Papa = require("papaparse");
+global.XMLHttpRequest = require('xhr2');
+class CSVFileValidator {
+    csvFile = null;
+    csvData = null;
+    config = null;
+    allowedDynamicColumns = null;
+    flattenHierarchyObj = null;
+    response = null;
+
+    /**
+     * @param {Object} config - validation config (headers, row limits, error callbacks)
+     * @param {Array} allowedDynamicColumns - additional columns allowed beyond the configured headers
+     * @param {Object} flattenHierarchyObj - flattened question set hierarchy passed to row-level validation
+     */
+    constructor(config, allowedDynamicColumns, flattenHierarchyObj = {}) {
+
+        this.config = config;
+        this.allowedDynamicColumns = allowedDynamicColumns;
+        this.flattenHierarchyObj = flattenHierarchyObj;
+    }
+
+    /**
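+     * Invokes the error callback named by `key` on `object` when it is a function;
+     * otherwise pushes `message` into the response's inValidMessages list.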
+     * @param {Object} object
+     * @param {String} key
+     * @param {String} message
+     * @param {Array} data
+     * @private
+     */
+    handleError(object, key, message, data) {
+        if (_.isFunction(_.get(object, key))) {
+            object[key](...data);
+        } else {
+            this.response.inValidMessages.push(message);
+        }
+        return this.response;
+    }
+
+    /**
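+     * Validates the parsed CSV data against the configured headers and row rules
+     * and collects the results in the response object.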
+     * @private
+     */
+    prepareDataAndValidateFile() {
+        const expectedColumns = this.config.headers.length;
+        const foundColumns = this.csvData[0].length;
+        // Check if extra columns are present other than specified
+        if (foundColumns > expectedColumns) {
+            const invalidColumns = _.map(_.range(expectedColumns, foundColumns), (number) => this.csvData[0][number] || `Column ${number}`)
+            return this.handleError(this.config, 'extraHeaderError', `Invalid data found in columns: ${invalidColumns.join(',')}`, [invalidColumns, expectedColumns, foundColumns]);
+        }
+
+        // One row for headers
+        const actualRows = this.csvData.length - 1;
+
+        // Empty rows or file validation
+        if (actualRows === 0) {
+            return this.handleError(this.config, 'noRowsError', `Empty rows found in the file`, []);
+        }
+
+        // Minimum rows validation
+        const minRows = _.get(this.config, 'minRows', 0);
+        if (minRows > 0 && (minRows > actualRows)) {
+            return this.handleError(this.config, 'minRowsError', `Expected min ${minRows} rows but found ${actualRows} rows in the file`, [minRows, actualRows]);
+        }
+
+        // Maximum rows validation
+        const maxRows = _.get(this.config, 'maxRows', 0);
+        if (maxRows > 0 && (maxRows < actualRows)) {
+            return this.handleError(this.config, 'maxRowsError', `Expected max ${maxRows} rows but found ${actualRows} rows in the file`, [maxRows, actualRows]);
+        }
+
+        // Required headers validation
+        const headers = this.config.headers;
+        const csvHeaders = _.first(this.csvData);
+        const headerNames = headers.map(row => {
+           row.name = _.get(row, 'name', '').trim();
+           return row.name;
+        });
+
+        // Missing headers
+        let difference = headerNames
+            .filter(x => !csvHeaders.includes(x))
+            .concat(csvHeaders.filter(x => !headerNames.includes(x)));
+
+        if (difference.length > 0) {
+            difference.forEach((column) => {
+                const valueConfig = headers.find(row => row.name === column);
+                if (valueConfig) {
+                    this.handleError(valueConfig, 'headerError', `${column} header is missing`, [column]);
+                }
+            });
+        }
+
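+        // Tracks the values seen so far per column, for columns marked as unique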
+        const uniqueValues = {};
+
+        // Iterate over each row in csv file
+        this.csvData.forEach((row, rowIndex) => {
+            // First row is headers so skip it
+            if (rowIndex === 0) return;
+
+            // Skip rows that have fewer columns than the configured headers
+            if (row.length < headers.length) {
+                return;
+            }
+
+            const rowData = {};
+            let hasError = false;
+
+            // Iterate over each column (header) in a row
+            headers.forEach((valueConfig, columnIndex) => {
+                // If header is not present
+                if (!valueConfig) {
+                    return;
+                }
+
+                // Get the column (header) value
+                let columnValue = (row[columnIndex] || '').trim();
+
+                // Apply the configured default when the cell is empty
+                if (valueConfig.isDefault && !columnValue) {
+                    columnValue = valueConfig.default;
+                }
+
+                const maxLength = _.get(valueConfig, 'maxLength', -1);
+
+                // Max length validation
+                if (typeof(columnValue) === 'string' && maxLength > -1) {
+                    if (columnValue.length > maxLength) {
+                        this.handleError(valueConfig, 'maxLengthError', `${valueConfig.name} contains more than ${maxLength} characters at row: ${rowIndex + 1}`, [valueConfig.name, rowIndex + 1, columnIndex + 1, maxLength, columnValue.length]);
+                        hasError = true;
+                        return;
+                    }
+                }
+
+
+                //  Required column value validation
+                if (valueConfig.required && !columnValue.length) {
+                    this.handleError(valueConfig, 'requiredError', `${valueConfig.name} is required in the (${rowIndex + 1}) row / (${columnIndex + 1}) column`, [valueConfig.name, rowIndex + 1, columnIndex + 1]);
+                    hasError = true;
+                    return;
+                }
+
+                // Custom column (header) validation
+                if (valueConfig.validate && !valueConfig.validate(columnValue)) {
+                    this.handleError(valueConfig, 'validateError', `${valueConfig.name} is not valid in the (${rowIndex + 1}) row / (${columnIndex + 1}) column`, [valueConfig.name, rowIndex + 1, columnIndex + 1]);
+                    hasError = true;
+                    return;
+                }
+
+                // Unique validation
+                if (valueConfig.unique) {
+                    const inputName = _.get(valueConfig, 'inputName');
+                    uniqueValues[inputName] = _.get(uniqueValues, `${inputName}`, []);
+
+                    // If value not present in array
+                    if (!uniqueValues[inputName].includes(columnValue)) {
+                        uniqueValues[inputName].push(columnValue);
+                    } else {
+                        this.handleError(valueConfig, 'uniqueError', `${valueConfig.name} has duplicate value in the (${rowIndex + 1}) row / (${columnIndex + 1}) column`, [valueConfig.name, rowIndex + 1, columnIndex + 1, columnValue]);
+                        hasError = true;
+                        return;
+                    }
+                }
+
+                // Copy optional column values into the row data
+                if (valueConfig.optional) {
+                    rowData[valueConfig.inputName] = columnValue;
+                }
+
+                // Url validation
+                if (valueConfig.isUrl && !_.isEmpty(columnValue)) {
+                    const urlPattern = new RegExp('^(https?:\\/\\/)?'+ // protocol
+                    '((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|'+ // domain name
+                    '((\\d{1,3}\\.){3}\\d{1,3}))'+ // OR ip (v4) address
+                    '(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*'+ // port and path
+                    '(\\?[;&a-z\\d%_.~+=-]*)?'+ // query string
+                    '(\\#[-a-z\\d_]*)?$','i'); // fragment locator
+                    const isUrl = !!urlPattern.test(columnValue);
+                    if (!isUrl) {
+                        this.handleError(valueConfig, 'urlError', `${valueConfig.name} has invalid url at (${rowIndex + 1}) row / (${columnIndex + 1}) column`, [valueConfig.name, rowIndex + 1, columnIndex + 1, columnValue]);
+                        hasError = true;
+                        return;
+                    }
+                }
+                // Array validation
+                if (valueConfig.isArray) {
+                    rowData[valueConfig.inputName] = _.isEmpty(columnValue) ? [] : columnValue.split(',')
+                        .map((value) => value.trim());
+                } else {
+                    rowData[valueConfig.inputName] = columnValue;
+                }
+
+                const inValues = _.get(valueConfig, 'in', []);
+
+                // In values validation
+                if (!_.isEmpty(inValues) && !valueConfig.isDefault) {
+                    const lowerValues = inValues.map((v) => _.toLower(v));
+                    if (!lowerValues.includes(_.toLower(columnValue))) {
+                        this.handleError(valueConfig, 'inError', `${valueConfig.name} has invalid value at row: ${rowIndex + 1}`, [valueConfig.name, rowIndex + 1, columnIndex + 1, valueConfig.in, columnValue]);
+                        hasError = true;
+                        return;
+                    }
+                }
+            });
+
+            if (hasError) {
+                return;
+            }
+
+            // Custom row validation
+            if (_.isFunction(this.config.validateRow)) {
+                this.config.validateRow(rowData, rowIndex + 1, this.flattenHierarchyObj);
+            }
+
+            // Push the rowData
+            this.response.data.push(rowData);
+        });
+
+        // Return response
+        return this.response;
+    }
+
+    /**
+     * @param {String} csvFilePath - URL of the CSV file to download, parse and validate
+     * @returns {Promise<Object>} resolves with the response object ({ inValidMessages, data })
+     */
+    validate(csvFilePath) {
+        this.csvFile = csvFilePath;
+        this.response = {
+            inValidMessages: [],
+            data: []
+        };
+
+        return new Promise((resolve, reject) => {
+            Papa.parse(csvFilePath, {
+                download: true,
+                complete: (results, file) => {
+                    this.csvData = results.data;
+                    const dynamicHeaders = !_.isEmpty(this.allowedDynamicColumns) ?
+                        [...this.config.headers, ..._.filter(this.allowedDynamicColumns, columns => {
+                            return _.includes(_.first(this.csvData), columns.name);
+                        })] : [...this.config.headers];
+                    this.config.headers = _.uniqBy(dynamicHeaders, 'inputName');
+                    resolve(this.prepareDataAndValidateFile());
+                },
+                error: (error, file) => {
+                    reject({ error: error, file: file });
+                }
+            });
+        });
+    }
+}
+
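+// Example usage (see qumlBulkService.bulkUpload):
+//   const validator = new CSVFileValidator(uploadCsvConfig, allowedDynamicColumns, flattenHierarchyObj);
+//   validator.validate(csvFileURL).then(({ inValidMessages, data }) => { /* ... */ });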
+module.exports = CSVFileValidator;
\ No newline at end of file
diff --git a/src/helpers/googleOauthHelper.js b/src/helpers/googleOauthHelper.js
new file mode 100644
index 0000000000000000000000000000000000000000..a4519cdc11bde634b069f3097ecd31db3e025514
--- /dev/null
+++ b/src/helpers/googleOauthHelper.js
@@ -0,0 +1,63 @@
+'use strict';
+const { google } = require('googleapis');
+const fs = require('fs');
+const os = require('os');
+const path = require('path');
+const envVariables = require("../envVariables");
+
+class GoogleOauth {
+  auth = null;
+  constructor() {
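+    // Service-account credentials are read from envVariables.SUNBIRD_GOOGLE_SERVICE_ACCOUNT_CREDENTIAL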
+    this.auth = new google.auth.GoogleAuth({
+      credentials: {
+        "private_key": envVariables.SUNBIRD_GOOGLE_SERVICE_ACCOUNT_CREDENTIAL.private_key,
+        "client_email": envVariables.SUNBIRD_GOOGLE_SERVICE_ACCOUNT_CREDENTIAL.client_email
+      },
+      scopes: [
+        'https://www.googleapis.com/auth/drive',
+        'https://www.googleapis.com/auth/drive.appdata',
+        'https://www.googleapis.com/auth/drive.file',
+        'https://www.googleapis.com/auth/drive.metadata',
+        'https://www.googleapis.com/auth/drive.metadata.readonly',
+        'https://www.googleapis.com/auth/drive.photos.readonly',
+        'https://www.googleapis.com/auth/drive.readonly',
+      ],
+    });
+  }
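+  /**
+   * Downloads a Drive file by id and streams it to `filepath` (defaults to a file
+   * named after the original inside the OS temp directory). Resolves with the local
+   * filePath merged with the file's Drive metadata.
+   */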
+  async downloadFile(fileId, filepath = '') {
+    const drive = google.drive({ version: 'v3', auth: this.auth});
+    const readFileRes = await drive.files.get({ fileId: fileId });
+    return drive.files
+      .get({ fileId, alt: 'media' }, { responseType: 'stream' })
+      .then((res) => {
+        return new Promise((resolve, reject) => {
+          const filePath = filepath ? filepath : path.join(os.tmpdir(), readFileRes.data.name);
+          console.log(`writing to ${filePath}`);
+          const dest = fs.createWriteStream(filePath);
+          let progress = 0;
+          res.data
+            .on('end', () => {
+              console.log(`Done downloading file ==> ${fileId}`);
+              resolve({filePath, ...readFileRes.data});
+            })
+            .on('error', (err) => {
+              console.error('Error downloading file.');
+              reject(err);
+            })
+            .on('data', (d) => {
+              progress += d.length;
+              if (process.stdout.isTTY) {
+                process.stdout.clearLine();
+                process.stdout.cursorTo(0);
+                process.stdout.write(
+                  `Downloaded ${progress} bytes for ==> ${fileId}`
+                );
+              }
+            })
+            .pipe(dest);
+        });
+      });
+  }
+}
+
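+// Example usage (see kafkaQumlConsumerService.downloadFile):
+//   new GoogleOauth().downloadFile(fileId).then(({ filePath, name, mimeType }) => { /* ... */ });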
+module.exports = GoogleOauth;
\ No newline at end of file
diff --git a/src/middlewares/request.middleware.js b/src/middlewares/request.middleware.js
index 40d0ee082f528f2c4f7525cb66d62e0502732924..17f437a3b218d47374544fd2ab46a75a4128af02 100644
--- a/src/middlewares/request.middleware.js
+++ b/src/middlewares/request.middleware.js
@@ -7,6 +7,7 @@ var utilsService = require('../service/utilsService') //
 var contentProvider = require('sb_content_provider_util') // done
 // var { ApiInterceptor } = require('sb_api_interceptor') // done
 var ApiInterceptor = require('sb_api_interceptor') // done
+const loggerService = require('../service/loggerService');
 var _ = require('underscore') // done
 var reqMsg = messageUtil.REQUEST
 var responseCode = messageUtil.RESPONSE_CODE
diff --git a/src/package.json b/src/package.json
index 22b76eee89f2aaf7a6eba1feeb507b7a93b35d8e..de8178cc4015ed8b24ce6935796f48586dc1db65 100644
--- a/src/package.json
+++ b/src/package.json
@@ -10,6 +10,7 @@
   },
   "dependencies": {
     "@project-sunbird/telemetry-sdk": "0.0.24",
+    "async": "^3.2.1",
     "axios": "^0.19.2",
     "better-config": "^1.2.3",
     "bluebird": "^3.7.2",
@@ -28,6 +29,8 @@
     "express-cluster": "0.0.5",
     "express-fileupload": "^1.2.1",
     "express-validator": "^6.6.1",
+    "form-data": "^4.0.0",
+    "googleapis": "^88.2.0",
     "helmet": "^3.23.1",
     "http-errors": "^1.6.3",
     "image-size": "^0.9.7",
@@ -37,6 +40,7 @@
     "lodash": "^4.17.15",
     "morgan": "~1.9.1",
     "node-fetch": "2.6.1",
+    "papaparse": "^5.3.1",
     "pdfmake": "^0.1.70",
     "pg": "^7.18.2",
     "redis": "^3.0.2",
@@ -54,7 +58,8 @@
     "sb_telemetry_util": "file:libs/sb_telemetry_util",
     "sequelize": "^5.21.5",
     "underscore": "^1.9.2",
-    "uuid": "^3.2.1"
+    "uuid": "^3.2.1",
+    "xhr2": "^0.2.1"
   },
   "devDependencies": {
     "chai": "^4.2.0",
diff --git a/src/routes/qumlBulkRoutes.js b/src/routes/qumlBulkRoutes.js
index 0256c48c3cc63626f117d534406e452390c24275..2beb7eccc6e2e532b8fcbdbd5cb192619f13f766 100644
--- a/src/routes/qumlBulkRoutes.js
+++ b/src/routes/qumlBulkRoutes.js
@@ -1,11 +1,16 @@
 const qumlBulkUpload = require("../service/qumlBulkService");
+const requestMiddleware = require('../middlewares/request.middleware')
+const qumlRequestMiddleware = require('../validators/qumlBulkUploadValidators');
 const BASE_URL = '/question/v1';
 
 module.exports = function (app) {
-    app.route(BASE_URL + '/bulkUpload')
-      .post(qumlBulkUpload.bulkUpload)
+    app.route(BASE_URL + '/bulkupload/')
+      .post(requestMiddleware.gzipCompression(), requestMiddleware.createAndValidateRequestBody,
+        requestMiddleware.checkChannelID,
+        qumlRequestMiddleware.qumlBulkUploadValidator(), qumlRequestMiddleware.validate, qumlBulkUpload.bulkUpload);
   
     app.route(BASE_URL + '/bulkUploadStatus')
-      .post(qumlBulkUpload.qumlSearch)
+      .post(requestMiddleware.gzipCompression(), requestMiddleware.createAndValidateRequestBody,
+      qumlBulkUpload.qumlSearch);
   }
   
\ No newline at end of file
diff --git a/src/service/kafkaQumlConsumerService.js b/src/service/kafkaQumlConsumerService.js
index 9188dbd0c5776840d1f1f455c0d93492d4e4a84d..3a1fad3b4060e3b85949026828a5231275e1d500 100644
--- a/src/service/kafkaQumlConsumerService.js
+++ b/src/service/kafkaQumlConsumerService.js
@@ -1,11 +1,23 @@
+const fs = require("fs");
+const path = require("path");
 var kafka = require("kafka-node");
+var FormData = require('form-data');
 const fetch = require("node-fetch");
 const logger = require("sb_logger_util_v2");
+const { v4: uuidv4 } = require("uuid");
 const loggerService = require("./loggerService");
 const messageUtils = require("../service/messageUtil");
+const GoogleOauth  = require("../helpers/googleOauthHelper");
 const responseCode = messageUtils.RESPONSE_CODE;
 const errorCodes = messageUtils.ERRORCODES;
 const envVariables = require("../envVariables");
+const async = require('async');
+const _ = require("lodash");
+const templateClassMap = {
+  "1" : 'mcq-vertical',
+  "2" : 'mcq-vertical-split',
+  "3" : 'mcq-horizontal'
+}
+const total_options = 4;
 const rspObj = {};
 
 const qumlConsumer = () => {
@@ -30,148 +42,12 @@ const qumlConsumer = () => {
     var consumerGroup = new ConsumerGroup(options, [
       envVariables.SUNBIRD_QUESTION_BULKUPLOAD_TOPIC
     ]);
-
-    consumerGroup
-      .on("message", function (message) {
-        const qumlArr = JSON.parse(message.value);
+    consumerGroup.on("message", function (message) {
         logger.info({ message: "Entered into the consumer service" });
-        let parsedJsonValue = qumlArr;
-        let createApiData = {
-          request: {
-            question: {
-              code: parsedJsonValue.question.code,
-              mimeType: parsedJsonValue.question.mimeType,
-              name: parsedJsonValue.question.name,
-              editorState: parsedJsonValue.question.editorState,
-              primaryCategory: parsedJsonValue.question.primaryCategory,
-              body: parsedJsonValue.question.body,
-              processId: parsedJsonValue.question.processId,
-              questionFileRefId: parsedJsonValue.question.questionFileRefId,
-            },
-          },
-        };
-        //fetch call for creating a question.
-        fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/create`, {
-            method: "POST", // or 'PUT'
-            headers: {
-              "Content-Type": "application/json",
-            },
-            body: JSON.stringify(createApiData),
-          })
-          .then((response) => response.json())
-          .then((createResponseData) => {
-            let updateApiData = createResponseData;
-            delete parsedJsonValue.question.mimeType;
-            delete parsedJsonValue.question.code;
-            delete parsedJsonValue.question.processId;
-            delete parsedJsonValue.question.questionId;
-            let updateData = { request: parsedJsonValue };
-            //if success fetch call for updating question.            
-            if (
-              createResponseData.responseCode.toLowerCase() ===
-              "OK".toLowerCase()
-            ) {
-              fetch(
-                `${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/update/${createResponseData.result.identifier}`,
-                {
-                  method: "PATCH", // or 'PUT'
-                  headers: {
-                    "Content-Type": "application/json",
-                  },
-                  body: JSON.stringify(updateData),
-                }
-              )
-                .then((response) => response.json())
-                .then((updateResponseData) => {
-                  let reviewData = { request: { question: {} } };
-                  //if success fetch call for reviewing question.
-                  if (
-                    updateResponseData.responseCode.toLowerCase() ===
-                    "OK".toLowerCase()
-                  ) {
-                    fetch(
-                      `${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/review/${updateApiData.result.identifier}`,
-                      {
-                        method: "POST", // or 'PUT'
-                        headers: {
-                          "Content-Type": "application/json",
-                        },
-                        body: JSON.stringify(reviewData),
-                      }
-                    )
-                      .then((response) => response.json())
-                      .then((reviewResponseData) => {
-                        let publishApiData = { request: { question: {} } };
-                        //if success fetch call for publishing question.
-                        if (
-                          reviewResponseData.responseCode.toLowerCase() ===
-                          "OK".toLowerCase()
-                        ) {
-                          fetch(
-                            `${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/publish/${updateApiData.result.identifier}`,
-                            {
-                              method: "POST", // or 'PUT'
-                              headers: {
-                                "Content-Type": "application/json",
-                              },
-                              body: JSON.stringify(publishApiData),
-                            }
-                          )
-                            .then((response) => response.json())
-                            .then((publishResponseData) => {
-                              if (
-                                publishResponseData.responseCode.toLowerCase() ===
-                                "OK".toLowerCase()
-                              ) {
-                                updateResponse(
-                                  updateApiData.result.identifier,
-                                  `Successfully uploaded the question for the identifier:${updateApiData.result.identifier}`
-                                );
-                              }
-                            })
-                            .catch((error) => {
-                              console.error("Error:", error);
-                              updateResponse(
-                                updateApiData.result.identifier,
-                                `Something went wrong while Publishing the question`
-                              );
-                            });
-                        } 
-                      })
-                      .catch((error) => {
-                        console.error("Error:", error);
-                        logger.error({
-                          message:
-                            "Something Went wrong while reviewing the questions",
-                        });
-                        updateResponse(
-                          updateApiData.result.identifier,
-                          `Something Went wrong while reviewing the questions: ${error}`
-                        );
-                      });
-                  }
-                })
-                .catch((error) => {
-                  console.error("Error:", error);
-                  logger.error({
-                    message: "Something Went Wrong While Updating the question",
-                  });
-                  updateResponse(
-                    updateApiData.result.identifier,
-                    `Something Went Wrong While Updating the question: ${error}`
-                  );
-                });
-            }
-          })
-          .catch((error) => {
-            console.error("Error:", error);
-            logger.error({
-              message: `Something Went Wrong While Creating the question ${error}`,
-            });
-          });
-      
-        })
-      .on("error", function (message) {
+        let parsedJsonValue = JSON.parse(message.value);
+        console.log("Kafka consumer :: ", parsedJsonValue);
+        initQuestionCreateProcess(parsedJsonValue);
+      }).on("error", function (message) {
         client.close();
       });
   } catch (error) {
@@ -184,24 +60,452 @@ const qumlConsumer = () => {
   }
 };
 
+const initQuestionCreateProcess = (questionData) => {
+  logger.info({ message: "Question creating process started" });
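+  // Pipeline: download referenced images -> prepare the question body -> create ->
+  // review -> publish -> link to a question set. Review/publish are skipped for
+  // 'draft'/'review' statuses; linking is skipped when neither a questionSetId nor
+  // a section id is present.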
+  async.waterfall([
+    async.apply(startDownloadFileProcess, questionData),
+    async.apply(prepareQuestionBody),
+    async.apply(createQuestion),
+    async.apply(reviewQuestion, questionData.status),
+    async.apply(publishQuestion, questionData.status),
+    async.apply(linkQuestionToQuestionSet, questionData)
+  ], function (err, result) {
+      if(err) { 
+        return logger.error(
+          {
+            message: `Something Went Wrong While Creating the question ${JSON.stringify(err)}`,
+          },
+          err
+        ); 
+      }
+      console.log('initQuestionCreateProcess :: SUCCESS ::', JSON.stringify(result));
+  });
+};
+
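+// Downloads every Google Drive image referenced by the question (question image and
+// option images), creates and uploads an asset for each, and replaces the Drive URL
+// on the question with the resulting artifactUrl. Already-downloaded files are reused.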
+const startDownloadFileProcess = (question, outerCallback) => {
+  const filesToDownload = _.omitBy(_.pick(question, ['questionImage','option1Image', 'option2Image', 'option3Image', 'option4Image']), _.isEmpty);
+  if(_.isEmpty(filesToDownload)) {
+    return outerCallback(null, question);
+  }
+  const downloadedFiles = {};
+  async.eachOfSeries(filesToDownload, function (data, key, callback) {
+    const fileId = getIdFromUrl(data);
+    if(_.has(downloadedFiles,fileId)) {
+      question[key] = _.get(downloadedFiles,fileId);
+      console.log(key, " :: File already downloaded :: ", data)
+      callback(null, 'File');
+    } else {
+      async.waterfall([
+        async.apply(downloadFile, data),
+        async.apply(createAsset, question),
+        async.apply(uploadAsset),
+        async.apply(deleteFileFromTemp),
+      ], function (err, result) {
+          if(err) { 
+            return callback(err); 
+          }
+          downloadedFiles[fileId] = result.artifactUrl;
+          question[key] = result.artifactUrl;
+          callback(null, result);
+      });
+    }
+  }, function (error) {
+    console.log("===================error", error);
+    if (error) {
+      outerCallback(error);
+    } else {
+      outerCallback(null, question);
+    }
+  });
+}
+
+
+const downloadFile = (data, callback) => {
+  const googleAuth =  new GoogleOauth();
+  const fileId = getIdFromUrl(data);
+  googleAuth.downloadFile(fileId).then((result) => {
+    console.log("RESULT ::", result);
+    callback(null, result);
+  }).catch((error) => {
+    callback(error);
+  })
+}
+
+const createAsset = (question, data, callback) => {
+  const extension = path.extname(data.name);  
+  const filename = path.basename(data.name, extension);
+  const mediaType = _.first(_.split(data.mimeType, '/'));
+    let reqBody = {
+      "request": {
+          "asset": {
+              "name": filename,
+              "code":uuidv4(),
+              "mimeType": data.mimeType,
+              "primaryCategory": "asset",
+              "mediaType": mediaType
+          }
+      }
+  };
+  console.log("createAssest request Body =====>", reqBody);
+  fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/asset/v1/create`, {
+      method: "POST", // or 'PUT'
+      headers: {
+        "X-Channel-ID": question.channel,
+        "Content-Type": "application/json",
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
+      },
+      body: JSON.stringify(reqBody),
+    })
+    .then((response) => response.json())
+    .then((assetResponseData) => {
+      if (assetResponseData.responseCode && _.toLower(assetResponseData.responseCode) === "ok") {
+        data['identifier'] = assetResponseData.result.identifier;
+        callback(null, data);
+      } else {
+        console.log("assetResponseData", assetResponseData);
+        callback(assetResponseData);
+      }
+    })
+    .catch((error) => {
+      console.log("catchcatch", error);
+      logger.error({
+        message: `Error while creating the asset ::  ${JSON.stringify(error)}`,
+      });
+      callback(error);
+    });
+} 
+
+const uploadAsset = (data, callback) => {
+  console.log("uploadAsset : ==> ", data);
+  var formdata = new FormData();
+  formdata.append("file", fs.createReadStream(data.filePath), data.name);
+  fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/asset/v1/upload/${data.identifier}`, {
+      method: "POST", // or 'PUT'
+      headers: {
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
+      },
+      body: formdata,
+    })
+    .then((response) => response.json())
+    .then((uploadResponseData) => {
+      console.log("uploadResponseData ::: ==> ", JSON.stringify(uploadResponseData));
+      if (uploadResponseData.responseCode && _.toLower(uploadResponseData.responseCode) === "ok") {
+        data['artifactUrl'] = uploadResponseData.result.artifactUrl;
+        callback(null, data);
+      } else {
+        callback(uploadResponseData);
+      }
+    })
+    .catch((error) => {
+      logger.error({
+        message: `Error while uploading the asset ::  ${JSON.stringify(error)}`,
+      });
+      callback(error);
+    });
+} 
+
+const deleteFileFromTemp = (data, callback) => {
+  console.log("deleteFileFromTemp :: ===>", data);
+  fs.unlink(data.filePath, function(err) {
+    if(err && err.code == 'ENOENT') {
+        console.info("File doesn't exist, won't remove it. :: ", data.filePath);
+    } else if (err) {
+        // other errors, e.g. maybe we don't have enough permission
+        console.error("Error occurred while trying to remove file :: ", data.filePath);
+    } else {
+        console.info("File deleted successfully :: ", data.filePath);
+    }
+  });
+  return callback(null, data);
+} 
+
+
+// Extracts the Google Drive file ID from a sharing URL (the part between /d/ and
+// the next /). If no match is found, the original parameter is returned and is
+// assumed to already be the ID.
+const getIdFromUrl = (url) => {
+  var parts = url.match(/\/d\/(.+)\//);
+  if (parts == null || parts.length < 2) {
+    return url;
+  } else {
+    return parts[1];
+  }
+}
+
+const prepareQuestionBody = (question, callback) => {
+  let metadata = {
+    code : uuidv4(),
+    mimeType: 'application/vnd.sunbird.question',
+    editorState: {},
+    body: mergeQuestionTextAndImage(question.questionText, question.questionImage)
+  };
+  const questionType  = question.questionType.toLowerCase();
+  if (questionType === 'mcq') {
+    metadata = _.assign(metadata, prepareMcqBody(question));
+  }
+
+  metadata = _.assign(metadata, _.pick(question, ['additionalCategories', 'board', 'medium', 'gradeLevel', 
+  'subject', 'topic', 'learningOutcome','skill','keywords','audience', 'author', 'copyright', 'license', 'attributions',
+  'channel', 'framework', 'topic', 'createdBy', 'questionFileRefId', 'processId']));
+  metadata.editorState.question = mergeQuestionTextAndImage(question.questionText, question.questionImage);
+  metadata = _.omitBy(metadata, _.isEmpty);
+  console.log("prepareQuestionBody :: => ", metadata);
+  callback(null, metadata);
+}
+
+const mergeQuestionTextAndImage = (questionText, questionImage) => {
+  const questionTemplate = '<figure class=\"image image-style-align-left resize-25\"><img src=\"{questionImage}\" alt=\"\"></figure><p><br>{questionText}</p>'
+  if(!_.isEmpty(questionImage)) {
+    return questionTemplate.replace('{questionImage}', questionImage)
+    .replace('{questionText}', questionText);
+  } else {
+    return `<p>${questionText}</p>`
+  }
+}
+
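+// Builds MCQ-specific metadata: options, interactions, response declaration and the
+// HTML body, using the layout class mapped from the optionLayout column (templateClassMap).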
+const prepareMcqBody = (question) => {
+  const correctAnswer = Number(question.answerNo);
+  const templateId = templateClassMap[question.optionLayout];
+  let options = [];
+  let interactOptions = [];
+ _.forEach(_.range(total_options), (opt, index) => {
+    let optionValue = question[`option${index + 1}`];
+    let optionImage = question[`option${index + 1}Image`];
+    if (!_.isEmpty(optionValue) || !_.isEmpty(optionImage)) {
+      options.push({ 
+        answer: correctAnswer === (index + 1), 
+        value: { body: mergeQuestionTextAndImage(optionValue, optionImage), value: index } 
+      });
+      interactOptions.push({ label:mergeQuestionTextAndImage(optionValue, optionImage), value: index });
+    }
+  });
+  let metadata = {
+    body: getMcqQuestionHtmlBody(question, templateId),
+    templateId: templateId,
+    name: question.name ? question.name :'Multiple Choice Question',
+    responseDeclaration: getResponseDeclaration(question),
+    interactionTypes: ['choice'],
+    interactions: {
+      response1: {
+        type: 'choice',
+        options: interactOptions
+      }
+    },
+    editorState: {
+      options
+    },
+    qType: _.toUpper(question.questionType),
+    primaryCategory: 'Multiple Choice Question'
+  };
+  return metadata;
+}
+
+const getMcqQuestionHtmlBody = (question, templateId) => {
+  const mcqBodyWithoutImage = '<div class=\'question-body\'><div class=\'mcq-title\'>{questionText}</div><div data-choice-interaction=\'response1\' class=\'{templateClass}\'></div></div>';
+  const mcqBodyWithImage = "<div class='question-body'><div class='mcq-title'><figure class=\"image image-style-align-left resize-25\"><img src=\"{questionImage}\" alt=\"\" ></figure><p><br>{questionText}</p></div><div data-choice-interaction='response1'class=\'{templateClass}\'></div></div>";
+  const mcqBody = question.questionImage ? mcqBodyWithImage : mcqBodyWithoutImage;
+  const questionBody = mcqBody.replace('{templateClass}', templateId)
+    .replace('{questionText}', question.questionText)
+    .replace('{questionImage}', question.questionImage);
+  return questionBody;
+}
+
+const getResponseDeclaration = (question) => {
+  const responseDeclaration = {
+    response1: {
+      maxScore: 1,
+      cardinality: 'single',
+      type: 'integer',
+      correctResponse: {
+        value: question.answerNo,
+        outcomes: { SCORE: 1 }
+      }
+    }
+  };
+  return responseDeclaration;
+}
+
+const createQuestion = (questionBody, callback) => {
+  let createApiData = {
+    "request": {
+        "question": questionBody
+    }
+  };
+  //fetch call for creating a question.
+  console.log('createQuestionBody::' , JSON.stringify(createApiData));
+  fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/create`, {
+      method: "POST", // or 'PUT'
+      headers: {
+        "Content-Type": "application/json",
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
+      },
+      body: JSON.stringify(createApiData),
+    })
+    .then((response) => response.json())
+    .then((createResponseData) => {
+      if (createResponseData.responseCode && _.toLower(createResponseData.responseCode) === "ok") {
+        callback(null, createResponseData);
+      } else {
+        callback(createResponseData);
+      }
+    })
+    .catch((error) => {
+      logger.error({
+        message: `Error while creating the question ::  ${JSON.stringify(error)}`,
+      });
+      callback(error);
+  });
+
+}
+
+const reviewQuestion = (status, questionRes, callback) => {
+
+  if(status && _.toLower(status) === 'draft') {
+    return callback(null, questionRes);
+  }
+
+  let reviewData = { request: { question: {} } };
+  fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/review/${questionRes.result.identifier}`,
+    {
+      method: "POST", // or 'PUT'
+      headers: {
+        "Content-Type": "application/json",
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
+      },
+      body: JSON.stringify(reviewData),
+    }
+  )
+    .then((response) => response.json())
+    .then((reviewResponseData) => {
+      console.log("reviewQuestion response:: ", reviewResponseData);
+      if (reviewResponseData.responseCode && _.toLower(reviewResponseData.responseCode) === "ok") {
+        callback(null, reviewResponseData);
+      } else {
+        callback(reviewResponseData);
+      }
+    })
+    .catch((error) => {
+      logger.error({
+        message: `Error while reviewing the question ::  ${JSON.stringify(error)}`,
+      });
+      updateResponse(
+        questionRes.result.identifier,
+        `Something Went wrong while reviewing the questions: ${error}`
+      );
+      callback(error);
+    });
+}
+
+const publishQuestion = (status, questionRes, callback) => {
+  if(status && _.includes(['draft', 'review'], _.toLower(status))) {
+    return callback(null, questionRes);
+  }
+  let publishApiData = { request: { question: {} } };
+  fetch(
+    `${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/publish/${questionRes.result.identifier}`,
+    {
+      method: "POST", // or 'PUT'
+      headers: {
+        "Content-Type": "application/json",
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
+      },
+      body: JSON.stringify(publishApiData),
+    })
+    .then((response) => response.json())
+    .then((publishResponseData) => {
+      console.log("reviewQuestion response:: ", publishResponseData);
+      if (publishResponseData.responseCode && _.toLower(publishResponseData.responseCode) === "ok") {
+        callback(null, publishResponseData);
+      } else {
+        callback(publishResponseData);
+      }
+    })
+    .catch((error) => {
+      logger.error({
+        message: `Error while publishing the question ::  ${JSON.stringify(error)}`,
+      });
+      updateResponse(
+        questionRes.result.identifier,
+        `Something went wrong while Publishing the question`
+      );
+      callback(error);
+    });
+}
+
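+// Adds the created question to the requested question set via questionset/v1/add,
+// using the matched Level 1 section as collectionId when one was resolved.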
+const linkQuestionToQuestionSet = (questionData, questionRes, callback) => {
+  if(!_.has(questionData, 'questionSetId') && _.isEmpty(questionData.questionSetSectionId)) {
+    return callback(null, 'DONE');
+  }
+  let publishApiData = { 
+    request: { 
+      questionset: { 
+        "rootId" : questionData.questionSetId, 
+        ...(!_.isEmpty(questionData.questionSetSectionId) && { collectionId: questionData.questionSetSectionId}),
+        "children": [questionRes.result.identifier] } }
+  };
+  fetch(
+    `${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/questionset/v1/add`,
+    {
+      method: "PATCH", // or 'PUT'
+      headers: {
+        "Content-Type": "application/json",
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
+      },
+      body: JSON.stringify(publishApiData),
+    })
+    .then((response) => response.json())
+    .then((linkResponseData) => {
+      if (linkResponseData.responseCode && _.toLower(linkResponseData.responseCode) === "ok") {
+        // updateResponse(
+        //   questionRes.result.identifier,
+        //   `Successfully linked the question for the identifier:${questionRes.result.identifier}`
+        // );
+        callback(null, linkResponseData);
+      } else {
+        logger.error({
+          message: `Error while linking the question ::  ${JSON.stringify(linkResponseData)}`,
+        });
+        updateResponse(
+          questionRes.result.identifier,
+          `Something went wrong while linking the question`
+        );
+        callback(linkResponseData);
+      }
+    })
+    .catch((error) => {
+      logger.error({
+        message: `Error while linking the question ::  ${JSON.stringify(error)}`,
+      });
+      updateResponse(
+        questionRes.result.identifier,
+        `Something went wrong while linking the question`
+      );
+      callback(error);
+    });
+}
+
+
 //function to update the status of all other fetch calls mentioned above using question update.
 const updateResponse = (updateData, updateMessage) => {
   const updateNewData = {
     request: {
       question: {
         questionUploadStatus: updateMessage,
-      },
-    },
+      }
+    }
   };
+  console.log("updateResponse :: request Body::", updateNewData);
   fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/question/v1/update/${updateData}`, {
       method: "PATCH", // or 'PUT'
       headers: {
         "Content-Type": "application/json",
+        "Authorization" : `Bearer ${envVariables.SUNBIRD_PORTAL_API_AUTH_TOKEN}`
       },
       body: JSON.stringify(updateNewData),
     })
     .then((response) => response.json())
     .then((updateResult) => {
+      console.log("updateResult :: ======> ", updateResult);
       rspObj.responseCode = "OK";
       rspObj.result = {
         questionStatus: `Successfully updated the question data for the identifier: ${updateData}`,
@@ -228,4 +532,4 @@ const updateResponse = (updateData, updateMessage) => {
 module.exports = {
   qumlConsumer,
   updateResponse
-};
+};
\ No newline at end of file
diff --git a/src/service/messageUtil.js b/src/service/messageUtil.js
index 72b6a65cad0bbcac1b29538ef6e99065f5bead39..4fa30cbf5674dcf6cc85bc6464bbdaaf42ae4f62 100644
--- a/src/service/messageUtil.js
+++ b/src/service/messageUtil.js
@@ -735,10 +735,13 @@ exports.PROGRAM = {
   },
 QUML_BULKUPLOAD: {
     EXCEPTION_CODE: 'QUMLBLK',
-    MISSING_CODE: 'ERR_QUML_BULKUPLOAD',
-    MISSING_MESSAGE: 'Required fields like user_id or publisher_id or organization_id is missing',
+    VALIDATION_MESSAGE: 'Validation Errors',
+    MISSING_CODE: 'REQUIRED_FIELD_MISSING',
+    MISSING_MESSAGE: 'Errors are found in the file. Please correct and upload again',
     FAILED_CODE: 'ERR_QUML_BULKUPLOAD_FAILED',
     FAILED_MESSAGE: 'Unable to perform quml bulk upload',
+    HIERARCHY_FAILED_CODE: 'ERR_QUESTIONSET_HIERARCHY_FAILED',
+    HIERARCHY_FAILED_MESSAGE: 'Get questionset hierarchy failed',
     INFO: 'Quml bulk upload'
   },
   QUML_BULKSTATUS: {
diff --git a/src/service/qumlBulkService.js b/src/service/qumlBulkService.js
index ea0e7f11cd41d17a89ecd38fd25c932778adb8e1..d1acec9066cd647d74e467b7f1713604875b81b4 100644
--- a/src/service/qumlBulkService.js
+++ b/src/service/qumlBulkService.js
@@ -1,7 +1,10 @@
 const fs = require("fs");
 const fetch = require("node-fetch");
+const _ = require("lodash");
 const { v4: uuidv4 } = require("uuid");
 const KafkaService = require("../helpers/kafkaUtil");
+const { errorResponse, loggerError, successResponse } = require('../helpers/responseUtil');
+const CSVFileValidator = require("../helpers/csv-helper-util");
 const logger = require("sb_logger_util_v2");
 const loggerService = require("./loggerService");
 const messageUtils = require("./messageUtil");
@@ -9,179 +12,214 @@ const responseCode = messageUtils.RESPONSE_CODE;
 const programMessages = messageUtils.PROGRAM;
 const errorCodes = messageUtils.ERRORCODES;
 const envVariables = require("../envVariables");
-const rspObj = {};
 const csv = require("express-csv");
+let bulkUploadErrorMsgs;
+let allowedDynamicColumns = [];
+const bulkUploadConfig = {
+  maxRows: 300,
+};
+const max_options_limit = 4;
+let uploadCsvConfig;
 
 const bulkUpload = async (req, res) => {
+  bulkUploadErrorMsgs = []
+  const rspObj = req.rspObj
+  const reqHeaders = req.headers;
   const logObject = {
     traceId: req.headers["x-request-id"] || "",
     message: programMessages.QUML_BULKUPLOAD.INFO,
-  };
-  let totalQuestionLength = 0;
-  let errorArray = [];
+  };  
   let pId = uuidv4();
-  let successArray = [];
   let qumlData;
-  const fileType = req.files.File.mimetype;
-  const fileName = req.files.File.name;
+  setBulkUploadCsvConfig();
+  const csvFileURL = _.get(req, 'body.request.fileUrl', null);
   loggerService.entryLog("Api to upload questions in bulk", logObject);
-  //validating the file whether the incoming file is json or not
-  if (fileType !== "application/json") {
-    rspObj.errMsg = "The File  is not in JSON format!!";
-    rspObj.responseCode = responseCode.SERVER_ERROR;
-    logger.error({ message: "The File  is not in JSON format!!" });
-    res
-      .status(400)
-      .send(
-        { message: "The File  is not in JSON format!!", rspObj },
-        errorCodes.CODE2
-      );
-  } else {
-    const AppendData = req.files.File.data.toString("utf8");
-    fs.writeFile(`${fileName}.json`, AppendData, (err) => {
-      if (err) {
-        rspObj.errMsg = "Something Went Wrong While Writing the file";
-        rspObj.responseCode = responseCode.SERVER_ERROR;
-        logger.error(
-          { message: "Something Went Wrong While Writing the file", rspObj },
-          errorCodes.CODE2
-        );
-        res
-          .status(400)
-          .send(
-            { message: "Something Went Wrong While Writing the file", rspObj },
-            errorCodes.CODE2
-          );
-      } else {
-        logger.info({ message: "File has been written Successfully" });
+  const errCode = programMessages.EXCEPTION_CODE+'_'+programMessages.QUML_BULKUPLOAD.EXCEPTION_CODE
+  logger.info({ message: "Qeustionset ID ===>", questionSetID: _.get(req, 'body.request.questionSetId', null)});
+  getQuestionSetHierarchy(_.get(req, 'body.request.questionSetId'), reqHeaders, (err, data) => {
+    if(err) {
+      console.log('Error fetching hierarchy for questionSet', JSON.stringify(err));
+      rspObj.errCode = _.get(err, 'params.err') || programMessages.QUML_BULKUPLOAD.HIERARCHY_FAILED_CODE;
+      rspObj.errMsg = _.get(err, 'params.errmsg') || programMessages.QUML_BULKUPLOAD.HIERARCHY_FAILED_MESSAGE;
+      rspObj.responseCode = _.get(err, 'responseCode') || responseCode.SERVER_ERROR;
+      loggerError(rspObj,errCode+errorCodes.CODE2);
+      loggerService.exitLog({responseCode: rspObj.responseCode}, logObject);
+      return res.status(400).send(errorResponse(rspObj,errCode+errorCodes.CODE2));
+    }
+    const flattenHierarchyObj = getFlatHierarchyObj(data);
+    const csvValidator = new CSVFileValidator(uploadCsvConfig, allowedDynamicColumns, flattenHierarchyObj);
+    csvValidator.validate(csvFileURL).then((csvData) => {
+      if (!_.isEmpty(bulkUploadErrorMsgs)) {
+        rspObj.errCode = programMessages.QUML_BULKUPLOAD.MISSING_CODE;
+        rspObj.errMsg = programMessages.QUML_BULKUPLOAD.MISSING_MESSAGE;
+        rspObj.responseCode = responseCode.CLIENT_ERROR;
+        rspObj.result = { messages: bulkUploadErrorMsgs };
+        loggerError(rspObj,errCode+errorCodes.CODE3);
+        loggerService.exitLog({responseCode: rspObj.responseCode}, logObject);
+        return res.status(400).send(errorResponse(rspObj,errCode+errorCodes.CODE3));
       }
-    });
-    await readfile(fileName)
-      .then((ele) => {
-        qumlData = ele;
-      })
-      .catch((err) => {
-        rspObj.errMsg = "Something went Wrong while file reading";
-        rspObj.responseCode = responseCode.SERVER_ERROR;
-        logger.error({
-          message: "Something went Wrong while file reading",
-          rspObj,
-        });
-        res
-          .status(400)
-          .send(
-            {
-              message: "Something went Wrong while file reading",
-              errorData: err,
-              rspObj,
-            },
-            errorCodes.CODE2
-          );
+      qumlData = csvData.data;
+      _.forEach(qumlData, (question) => {
+        question = prepareQuestionData(question, req);
+        question['questionSetSectionId'] = flattenHierarchyObj[question.level1];
+        question["processId"] = pId;
+        console.log("Prepared Question body : =====>", question)
+        sendRecordToKafkaTopic(question);
       });
-    totalQuestionLength = qumlData.length;
-    //validating whether the userId,publisherId and organizationId is empty or not;
-    for (let i = 0; i < qumlData.length; i++) {
-      if (qumlData[i].userId === "") {
-        errorArray.push(
-          `${programMessages.QUML_BULKUPLOAD.MISSING_MESSAGE}: ${JSON.stringify(
-            qumlData[i]
-          )}`
-        );
-      } else if (qumlData[i].publisherId === "") {
-        errorArray.push(
-          `${programMessages.QUML_BULKUPLOAD.MISSING_MESSAGE}:${JSON.stringify(
-            qumlData[i]
-          )}`
-        );
-      } else if (qumlData[i].organizationId === "") {
-        errorArray.push(
-          `${programMessages.QUML_BULKUPLOAD.MISSING_MESSAGE}: ${JSON.stringify(
-            qumlData[i]
-          )}`
-        );
-      } else {
-        qumlData[i].question["processId"] = pId;
-        qumlData[i].question["questionFileRefId"] = uuidv4();
-        successArray.push(`${JSON.stringify(qumlData[i])}`);
-        //calling the kafka producer here
-        KafkaService.sendRecordWithTopic(
-          qumlData[i],
-          envVariables.SUNBIRD_QUESTION_BULKUPLOAD_TOPIC,
-          function (err, response) {
-            if (err) {
-              logger.error(
-                {
-                  message: "Something Went wrong while producing kafka",
-                  errorData: err,
-                },
-                errorCodes.CODE2
-              );
-            }
-          }
+      logger.info({ message: "Bulk Upload process has started successfully for the process Id", pId});
+      rspObj.responseCode = responseCode.SUCCESS;
+      rspObj.result = { processId: pId};
+      loggerService.exitLog({responseCode: rspObj.responseCode}, logObject);
+      return res.status(200).send(successResponse(rspObj))
+    }).catch(err => {
+      console.log('Error while validating the CSV file :: ', JSON.stringify(err));
+      rspObj.errCode = programMessages.QUML_BULKUPLOAD.FAILED_CODE;
+      rspObj.errMsg = programMessages.QUML_BULKUPLOAD.FAILED_MESSAGE;
+      rspObj.responseCode = responseCode.SERVER_ERROR;
+      loggerError(rspObj,errCode+errorCodes.CODE3);
+      loggerService.exitLog({responseCode: rspObj.responseCode}, logObject);
+      return res.status(400).send(errorResponse(rspObj,errCode+errorCodes.CODE3));
+    });
+  })
+};
+
+const sendRecordToKafkaTopic = (question) => {
+  const errCode = programMessages.EXCEPTION_CODE+'_'+programMessages.QUML_BULKUPLOAD.EXCEPTION_CODE
+  KafkaService.sendRecordWithTopic(question, envVariables.SUNBIRD_QUESTION_BULKUPLOAD_TOPIC,
+    (err, response) => {
+      if (err) { 
+        logger.error(
+          {
+            message: "Something Went wrong while producing kafka event",
+            errorData: err,
+          },
+          errCode+errorCodes.CODE4
         );
       }
+      if (!err) {
+        console.log('sendRecordWithTopic :: SUCCESS :: ', response);
+      }
     }
-    fs.unlink(`${fileName}.json`, function (err, res) {
-      if (err) {
-        logger.error({
-          message: "Something Went wrong while performing unlink of the file ",
-          errorData: err,
-        });
-      } else {
-        logger.info({
-          message: "Successfully unlinked the file",
-          resData: res,
-        });
-      }
-      //Do whatever else you need to do here
-    });
-  }
-  rspObj.responseCode = "OK";
-  rspObj.result = {
-    questionStatus: `Bulk Upload process has started successfully for the process Id : ${pId}`,
-    data: {
-      "Total no of questions": totalQuestionLength,
-      "No of questions getting processed": successArray.length,
-      "No of questions With issues": errorArray.length,
-      "Questions With wrong message": errorArray,
-    },
-  };
-  logger.info({
-    message: "Bulk Upload process has started successfully for the process Id",
-    pId,
-  });
-  loggerService.exitLog(
-  `Bulk Upload process has started successfully for the process Id : ${pId}`,
-    rspObj
   );
-  res
-  .status(200)
-  .send(
-    {message: `Bulk Upload process has started successfully for the process Id : ${pId}`,rspObj}
-    );
-};
+}
+
+const setBulkUploadCsvConfig = () => {
+  const headerError = (headerName) => {
+    setError(`${headerName} header is missing.`);
+  };
+  const requiredError = (headerName, rowNumber, columnNumber) => {
+    setError(`${headerName} value is missing at row: ${rowNumber}`);
+  };
+  const uniqueError = (headerName, rowNumber, columnNumber, value) => {
+    setError(`${headerName} has duplicate value at row: ${rowNumber}`);
+  };
+  const inError = (headerName, rowNumber, columnNumber, acceptedValues, value) => {
+    setError(`${headerName} has invalid value at row: ${rowNumber}`);
+  };
+  const urlError = (headerName, rowNumber, columnNumber, value) => {
+    setError(`${headerName} has invalid url value at row: ${rowNumber}`);
+  };
+  const maxLengthError = (headerName, rowNumber, columnNumber, maxLength, length) => {
+    setError(`Length of ${headerName} exceeds ${maxLength}. Please give a shorter ${headerName} at row: ${rowNumber}`);
+  };
+  const extraHeaderError = (invalidColumns, expectedColumns, foundColumns) => {
+    setError(`Invalid data found in columns: ${invalidColumns.join(',')}`);
+  };
 
-const readfile = (filename) => {
-  return new Promise((resolve, reject) => {
-    fs.readFile(`${filename}.json`, "utf8", (err, jsonString) => {
-      if (err) {
-        reject(err);
-      } else {
-        var qumlJsonData = JSON.parse(jsonString);
-        resolve(qumlJsonData);
+  const maxRowsError = (maxRows, actualRows) => {
+    setError(`Expected max ${maxRows} rows but found ${actualRows} rows in the file`);
+  };
+  const noRowsError = () => {
+    setError(`Empty rows in the file`);
+  };
+
+  const headers = [
+    { name: 'Name of the Question', inputName: 'name', maxLength: 120, required: true, requiredError, headerError, maxLengthError },
+    { name: 'QuestionText', inputName: 'questionText', headerError, maxLength: 1000, maxLengthError },
+    { name: 'QuestionImage', inputName: 'questionImage', headerError, isUrl: true, urlError},
+    { name: 'Option Layout', inputName: 'optionLayout', required: true, requiredError, headerError, in: ['1', '2', '3'], inError },
+    { name: 'Option1', inputName: 'option1', headerError, maxLength: 1000, maxLengthError },
+    { name: 'Option1Image', inputName: 'option1Image', headerError, isUrl: true, urlError},
+    { name: 'Option2', inputName: 'option2', headerError, maxLength: 1000, maxLengthError },
+    { name: 'Option2Image', inputName: 'option2Image', headerError, isUrl: true, urlError},
+    { name: 'Option3', inputName: 'option3', headerError, maxLength: 1000, maxLengthError },
+    { name: 'Option3Image', inputName: 'option3Image', headerError, isUrl: true, urlError},
+    { name: 'Option4', inputName: 'option4', headerError, maxLength: 1000, maxLengthError },
+    { name: 'Option4Image', inputName: 'option4Image', headerError, isUrl: true, urlError},
+    { name: 'AnswerNo', inputName: 'answerNo', required: true, requiredError, headerError },
+    { name: 'Level 1 Question Set Section', inputName: 'level1', headerError },
+    { name: 'Keywords', inputName: 'keywords', isArray: true, headerError },
+    { name: 'Author', inputName: 'author',headerError, maxLength: 300, maxLengthError },
+    { name: 'Copyright', inputName: 'copyright',headerError, maxLength: 300, maxLengthError },
+    { name: 'Attributions', inputName: 'attributions', isArray: true, headerError, maxLength: 300, maxLengthError }
+  ];
+
+  const validateRow = (row, rowIndex, flattenHierarchyObj) => {
+    if (_.isEmpty(row.questionText) && _.isEmpty(row.questionImage)) {
+      const name = headers.find((r) => r.inputName === 'questionText').name || '';
+      setError(`${name} is missing at row: ${rowIndex}`);
+    }
+
+    const options = [];
+    _.forEach(_.range(max_options_limit), (opt, index) => {
+      let optionValue = row[`option${index + 1}`] || '';
+      let optionImage = row[`option${index + 1}Image`] || '';
+      if(!_.isEmpty(optionValue) || !_.isEmpty(optionImage)) {
+        options.push({optionValue, optionImage});
       }
     });
-  });
-};
+
+    if (_.size(options) === 0) {
+      setError(`Options are empty at row: ${rowIndex}`);
+    } else if (_.size(options) < 2) {
+      setError(`Minimum two options are required at row: ${rowIndex}`);
+    }
+
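+    // The answer number must point at one of the supplied options (1..number of options)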
+    if(!_.includes(_.range(_.size(options), 0), _.toNumber(row.answerNo))) {
+      setError(`Answer number not valid at row: ${rowIndex}`);
+    }
+
+    if (!_.isEmpty(row.level1) && !_.has(flattenHierarchyObj, row.level1)) {
+      const name = headers.find((r) => r.inputName === 'level1').name || '';
+      setError(`${name} is invalid at row: ${rowIndex}`);
+      return;
+    }
+
+  };
+
+  uploadCsvConfig = {
+    headers: headers,
+    maxRows: bulkUploadConfig.maxRows,
+    validateRow,
+    maxRowsError,
+    noRowsError,
+    extraHeaderError
+  };
+}
+
+const setError = (message) => {
+  bulkUploadErrorMsgs.push(message);
+}
+
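+// Merges a CSV row with the request-level metadata (board, medium, status, questionSetId
+// etc.), attaches a questionFileRefId and the requesting channel, and defaults status to 'Live'.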
+const prepareQuestionData = (questionMetadata, req) => {
+  const requestedProperties = ['additionalCategories', 'board', 'medium', 'gradeLevel', 'subject', 'audience',
+                  'license', 'framework', 'topic', 'author','status', 'createdBy', 'questionType', 'questionSetId'];
+  questionMetadata['questionFileRefId'] = uuidv4();
+  questionMetadata['channel'] = req.get('x-channel-id');
+  questionMetadata = _.merge({}, questionMetadata, _.pick(req.body.request, requestedProperties));
+  if(!_.has(questionMetadata, 'status')) {
+    questionMetadata['status'] = 'Live';
+  }
+  return questionMetadata;
+}
 
 //question search API function;
 const qumlSearch = (req, res) => {
+  const rspObj = req.rspObj
   const searchData =  {
     "request": { 
         "filters":{
             "objectType":"Question",
             "status":[],
-            "processId":req.body.processId
+            "processId":req.body.request.processId
         },
         "fields":["identifier","processId","author","name","status","primaryCategory","questionUploadStatus","code","questionFileRefId"],
         "limit":1000
@@ -201,16 +239,16 @@ const qumlSearch = (req, res) => {
   })
     .then((response) => response.json())
     .then(async(resData) => {
-      rspObj.responseCode = "OK";
-      rspObj.result = {
-        questionStatus: `Successfully fetched the data for the given request: ${searchData}`,
-      };
+      console.log(resData);
+      rspObj.responseCode = resData.responseCode || responseCode.SUCCESS;
+      rspObj.result = { ...resData.result  }
       logger.info({ message: "Successfully Fetched the data", rspObj });
-      res.csv(resData.result.Question)
-    loggerService.exitLog(
-     "Successfully got the Questions",
-      rspObj,
-    );    
+      // res.csv(resData.result.Question)
+      loggerService.exitLog(
+      "Successfully got the Questions",
+        rspObj,
+      );    
+      return res.status(200).send(successResponse(rspObj))
     })
     .catch((error) => {
       rspObj.errMsg = "Something went wrong while fetching the data";
@@ -236,8 +274,42 @@ const qumlSearch = (req, res) => {
     });
 };
 
+//Read QuestionSet Hierarchy function;
+const getQuestionSetHierarchy = (questionSetId, reqHeaders, callback) => {
+  if (_.isEmpty(questionSetId)) { return callback(null, {}); }
+  fetch(`${envVariables.SUNBIRD_ASSESSMENT_SERVICE_BASE_URL}/questionset/v1/hierarchy/${questionSetId}?mode=edit`, {
+    method: "GET",
+    headers: reqHeaders
+  })
+  .then((response) => response.json())
+  .then((readResponseData) => {
+    if (readResponseData.responseCode && _.toLower(readResponseData.responseCode) === "ok") {
+      callback(null, readResponseData.result.questionSet);
+    } else {
+      callback(readResponseData);
+    }
+  })
+  .catch((error) => {
+    logger.error({
+      message: `Something Went Wrong While fetching the questionset hierarchy ${error}`,
+    });
+    callback(error);
+  });
+};
+
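+// Flattens the question set hierarchy into a { sectionName: identifier } map,
+// walking only child question sets with visibility 'Parent'.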
+const getFlatHierarchyObj = (data, hierarchyObj = {}) => {
+  if (!_.isEmpty(data)) {
+    hierarchyObj[data.name] = data.identifier;
+  }
+  _.forEach(data.children, child => {
+    if (child.mimeType === "application/vnd.sunbird.questionset" && child.visibility === 'Parent') {
+      getFlatHierarchyObj(child, hierarchyObj);
+    }
+  });
+  return hierarchyObj;
+}
+
 module.exports = {
   bulkUpload,
-  qumlSearch,
-  readfile
-};
+  qumlSearch
+};
\ No newline at end of file
diff --git a/src/validators/qumlBulkUploadValidators.js b/src/validators/qumlBulkUploadValidators.js
new file mode 100644
index 0000000000000000000000000000000000000000..5b41fa0b46c566e6363b9131832afa031b73486b
--- /dev/null
+++ b/src/validators/qumlBulkUploadValidators.js
@@ -0,0 +1,57 @@
+const { body, validationResult } = require('express-validator');
+var _ = require('lodash')
+const messageUtils = require('../service/messageUtil');
+const programMessages = messageUtils.PROGRAM;
+const errorCodes = messageUtils.ERRORCODES;
+const loggerService = require('../service/loggerService');
+const responseCode = messageUtils.RESPONSE_CODE;
+const { errorResponse, loggerError } = require('../helpers/responseUtil');
+
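+// express-validator rules for the bulk upload request body.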
+const qumlBulkUpload = () => {
+  return [
+    body('request').exists().withMessage('request object is missing'),
+    body('request.fileUrl')
+      .isString().withMessage('Metadata fileUrl should be a string value')
+      .notEmpty().withMessage('Required Metadata fileUrl should not be empty'),
+    body('request.questionType')
+      .isString().withMessage('Metadata questionType should be a string value')
+      .exists().withMessage('Required Metadata questionType not set')
+      .isIn(['MCQ']).withMessage('Metadata questionType should be one of: [MCQ]'),
+    body('request.status')
+      .optional()
+      .isString().withMessage('Metadata status should be a string value')
+      .isIn(['Live', 'Review', 'Draft']).withMessage('Metadata status should be one of: [Live, Review, Draft]'),
+    body('request.questionSetId')
+      .optional()
+      .isString().withMessage('Metadata questionSetId should be a string value')
+      .notEmpty().withMessage('Required Metadata questionSetId should not be empty'),
+  ]
+}
+
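+// Express middleware: responds with 400 and all validation messages when the request
+// body fails the rules above, otherwise hands control to the route handler.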
+const validate = (req, res, next) => {
+  const rspObj = req.rspObj;
+  const errCode = programMessages.EXCEPTION_CODE+'_'+ programMessages.QUML_BULKUPLOAD.EXCEPTION_CODE
+  const logObject = {
+    traceId : req.headers['x-request-id'] || '',
+    message : programMessages.QUML_BULKUPLOAD.INFO
+   }
+  loggerService.entryLog(req.body, logObject);
+  const errors = validationResult(req);
+  if (errors.isEmpty()) {
+    return next()
+  }
+  const extractedErrors = errors.array().map(err => err.msg);
+  rspObj.errCode = responseCode.CLIENT_ERROR;
+  rspObj.errMsg = programMessages.QUML_BULKUPLOAD.VALIDATION_MESSAGE;
+  rspObj.result = { messages: extractedErrors };
+  rspObj.responseCode = responseCode.CLIENT_ERROR;
+  loggerService.exitLog({responseCode: rspObj.responseCode}, logObject);
+  loggerError(rspObj,errCode+errorCodes.CODE1)
+  return res.status(400).send(errorResponse(rspObj,errCode+errorCodes.CODE1));
+}
+
+module.exports = {
+  qumlBulkUploadValidator : qumlBulkUpload,
+  validate
+}
\ No newline at end of file