diff --git a/ansible/artifacts-download.yml b/ansible/artifacts-download.yml
index 043446554d6c2d9180202b87bf3d709eabfffc09..46167180e4190e5db9feac226ef06566acb503ff 100644
--- a/ansible/artifacts-download.yml
+++ b/ansible/artifacts-download.yml
@@ -9,11 +9,11 @@
         name: azure-cloud-storage
         tasks_from: blob-download.yml
       vars:
-        blob_container_name: "{{ artifacts_container }}"
+        blob_container_name: "{{ cloud_storage_artifacts_bucketname }}"
         blob_file_name: "{{ artifact }}"
         local_file_or_folder_path: "{{ artifact_path }}"
-        storage_account_name: "{{ azure_artifact_storage_account_name }}"
-        storage_account_key: "{{ azure_artifact_storage_account_key }}"
+        storage_account_name: "{{ cloud_artifact_storage_accountname }}"
+        storage_account_key: "{{ cloud_artifact_storage_secret }}"
       when: cloud_service_provider == "azure"
 
     - name: download artifact from gcloud storage
@@ -21,9 +21,8 @@
         name: gcp-cloud-storage
         tasks_from: download.yml
       vars:
-        gcp_bucket_name: "{{ gcloud_artifact_bucket_name }}"
-        dest_folder_name: "{{ artifacts_container }}"
-        dest_file_name: "{{ artifact }}"
+        gcp_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
+        gcp_path: "{{ artifact }}"
         local_file_or_folder_path: "{{ artifact_path }}"
       when: cloud_service_provider == "gcloud"
 
@@ -33,9 +32,9 @@
         tasks_from: download.yml
       vars:
         local_file_or_folder_path: "{{ artifact_path }}"
-        s3_bucket_name: "{{ aws_artifact_s3_bucket_name }}"
-        s3_path: "{{ artifacts_container }}/{{ artifact }}"
-        aws_default_region: "{{ aws_region }}"
-        aws_access_key_id: "{{ aws_artifact_bucket_access_key }}"
-        aws_secret_access_key: "{{ aws_artifact_bucket_secret_access_key }}"
-      when: cloud_service_provider == "aws"  
\ No newline at end of file
+        s3_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
+        s3_path: "{{ artifact }}"
+        aws_default_region: "{{ cloud_public_storage_region }}"
+        aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
+        aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"
+      when: cloud_service_provider == "aws"
diff --git a/ansible/artifacts-upload.yml b/ansible/artifacts-upload.yml
index 32e866808c1cfe31bdb6642463a81c207084a2e7..3bdbe7301799996f52fdf7ff447feb4f77de0cd4 100644
--- a/ansible/artifacts-upload.yml
+++ b/ansible/artifacts-upload.yml
@@ -9,12 +9,12 @@
         name: azure-cloud-storage
         tasks_from: blob-upload.yml
       vars:
-        blob_container_name: "{{ artifacts_container }}"
+        blob_container_name: "{{ cloud_storage_artifacts_bucketname }}"
         container_public_access: "off"
         blob_file_name: "{{ artifact }}"
         local_file_or_folder_path: "{{ artifact_path }}"
-        storage_account_name: "{{ azure_artifact_storage_account_name }}"
-        storage_account_key: "{{ azure_artifact_storage_account_key }}"
+        storage_account_name: "{{ cloud_artifact_storage_accountname }}"
+        storage_account_key: "{{ cloud_artifact_storage_secret }}"
       when: cloud_service_provider == "azure"
 
     - name: upload artifact to gcloud storage
@@ -22,9 +22,8 @@
         name: gcp-cloud-storage
         tasks_from: upload.yml
       vars:
-        gcp_bucket_name: "{{ gcloud_artifact_bucket_name }}"
-        dest_folder_name: "{{ artifacts_container }}"
-        dest_file_name: "{{ artifact }}"
+        gcp_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
+        gcp_path: "{{ artifact }}"
         local_file_or_folder_path: "{{ artifact_path }}"
       when: cloud_service_provider == "gcloud"
 
@@ -34,9 +33,9 @@
         tasks_from: upload.yml
       vars:
         local_file_or_folder_path: "{{ artifact_path }}"
-        s3_bucket_name: "{{ aws_artifact_s3_bucket_name }}"
-        s3_path: "{{ artifacts_container }}/{{ artifact }}"
-        aws_default_region: "{{ aws_region }}"
-        aws_access_key_id: "{{ aws_artifact_bucket_access_key }}"
-        aws_secret_access_key: "{{ aws_artifact_bucket_secret_access_key }}"
-      when: cloud_service_provider == "aws"
\ No newline at end of file
+        s3_bucket_name: "{{ cloud_storage_artifacts_bucketname }}"
+        s3_path: "{{ artifact }}"
+        aws_default_region: "{{ cloud_public_storage_region }}"
+        aws_access_key_id: "{{ cloud_artifact_storage_accountname }}"
+        aws_secret_access_key: "{{ cloud_artifact_storage_secret }}"
+      when: cloud_service_provider == "aws"
diff --git a/ansible/artifacts/sunbird/login/resources/js/telemetry_service.js b/ansible/artifacts/sunbird/login/resources/js/telemetry_service.js
index e1f88cc741dbe280a3aeceb16ac2bc39f1bd6a45..f9250fd425df73ae790c6c00bd916c017f3fcd02 100644
--- a/ansible/artifacts/sunbird/login/resources/js/telemetry_service.js
+++ b/ansible/artifacts/sunbird/login/resources/js/telemetry_service.js
@@ -2506,7 +2506,7 @@ if(client_id.toLowerCase() === 'android'){
     "telemetry": {
       "pdata": {
         "id": pdataId,
-        "ver": "5.0.0",
+        "ver": "5.1.0",
         "pid": "sunbird-portal"
       }
     }
@@ -2687,7 +2687,6 @@ if(client_id.toLowerCase() === 'android'){
   function stringToHTML(str) {
     let parser = new DOMParser();
     let doc = parser.parseFromString(str, 'text/html');
-    console.log('Doc parse => ', doc); // TODO: log!
     return doc?.body?.innerText || document.createElement('body');
   }
 
diff --git a/ansible/assets-upload.yml b/ansible/assets-upload.yml
index 12021680feecfeed8ddeb9ad2384a658beba5cc9..09e7df6ceb281d37ddb54aea8b0f2f492a1cf956 100644
--- a/ansible/assets-upload.yml
+++ b/ansible/assets-upload.yml
@@ -2,25 +2,17 @@
 - hosts: localhost
   vars_files:
     - ['{{inventory_dir}}/secrets.yml', 'secrets/{{env}}.yml']
-  # The vars: section is added for the below reason
-  # 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-  # 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-  #    or other default files and just assign the value to the newly introduced common variable 
-  # 3. After few releases, we will remove the older variables and use only the new variables across the repos
-  vars:
-    player_cdn_storage: "{{ player_cdn_container }}"
   # Azure
   tasks:
   - name: this block consists of tasks related to azure storage
     block:
     - name: set common azure variables
       set_fact:
-        blob_container_name: "{{ player_cdn_storage }}"
+        blob_container_name: "{{ cloud_storage_playercdn_bucketname }}"
         container_public_access: "container"
         blob_container_folder_path: ""
-        storage_account_name: "{{ azure_public_storage_account_name }}"
-        storage_account_key: "{{ azure_public_storage_account_key }}"
-        storage_account_sas_token: "{{ azure_public_storage_account_sas }}"
+        storage_account_name: "{{ cloud_public_storage_accountname }}"
+        storage_account_key: "{{ cloud_public_storage_secret }}"
 
     - name: delete files and folders from azure storage using azcopy
       include_role:
@@ -40,11 +32,11 @@
     block:
     - name: set common aws variables
       set_fact:
-        s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-        s3_path: "{{ player_cdn_storage }}"
-        aws_default_region: "{{ aws_region }}"
-        aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-        aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
+        s3_bucket_name: "{{ cloud_storage_playercdn_bucketname }}"
+        s3_path: ""
+        aws_default_region: "{{ cloud_public_storage_region }}"
+        aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+        aws_secret_access_key: "{{ cloud_public_storage_secret }}"
 
     - name: delete files and folders from s3
       include_role:
@@ -64,10 +56,10 @@
     block:
     - name: set common gcloud variables
       set_fact:
-        dest_folder_name: "{{ player_cdn_storage }}"
-        dest_folder_path: ""
-        file_delete_pattern: "{{ player_cdn_storage }}/"
-        gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
+        gcp_bucket_name: "{{ cloud_storage_playercdn_bucketname }}"
+        gcp_path: ""
+        file_delete_pattern: ""
+
 
     - name: delete files and folders from gcloud storage
       include_role:
diff --git a/ansible/deploy-plugins.yml b/ansible/deploy-plugins.yml
index 8da2bd445e39023bb1a223d58fb8be818a9f86d4..a78ce1c640ae4432d6227e7a0fad44825595fcb2 100644
--- a/ansible/deploy-plugins.yml
+++ b/ansible/deploy-plugins.yml
@@ -2,13 +2,6 @@
   gather_facts: no
   vars_files:
     - "{{inventory_dir}}/secrets.yml"
-  # The vars: section is added for the below reason
-  # 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-  # 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-  #    or other default files and just assign the value to the newly introduced common variable 
-  # 3. After few releases, we will remove the older variables and use only the new variables across the repos
-  vars:
-    plugin_storage: "{{ plugin_container_name }}"
   tasks:
     - name: rename env_domain in preview_cdn.html for CDN
       shell: |
@@ -22,11 +15,10 @@
       block:
         - name: set common azure variables
           set_fact:
-            blob_container_name: "{{ plugin_storage }}"
+            blob_container_name: "{{ cloud_storage_content_bucketname }}"
             container_public_access: "container"
-            storage_account_name: "{{ azure_public_storage_account_name }}"
-            storage_account_key: "{{ azure_public_storage_account_key }}"
-            storage_account_sas_token: "{{ azure_public_storage_account_sas }}"
+            storage_account_name: "{{ cloud_public_storage_accountname }}"
+            storage_account_key: "{{ cloud_public_storage_secret }}"
           tags:
             - always
           no_log: True
@@ -86,13 +78,15 @@
           tags:
             - plugins
       when: cloud_service_provider == "azure"
-    
+
+### GCP tasks ####
     - name: this block consists of tasks related to gcloud storage
       block:
         - name: set common gcloud variables
           set_fact:
-            dest_folder_name: "{{ plugin_storage }}"
-            gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
+            gcp_bucket_name: "{{ cloud_storage_content_bucketname }}"
+            gcp_path: ""
+
           tags:
             - always
   
@@ -102,7 +96,7 @@
               name: gcp-cloud-storage
               tasks_from: delete-batch.yml
           vars:
-            file_delete_pattern: "{{ dest_folder_name }}/{{ folder_name }}"
+            file_delete_pattern: "{{ folder_name }}"
           tags:
             - content-editor
             - collection-editor
@@ -115,7 +109,7 @@
               name: gcp-cloud-storage
               tasks_from: upload-batch.yml
             vars:
-              dest_folder_path: "{{ folder_name }}"
+              gcp_path: "{{ folder_name }}"
               local_file_or_folder_path: "{{ source_name }}"
           tags:
             - content-editor
@@ -131,10 +125,38 @@
               name: gcp-cloud-storage
               tasks_from: upload.yml
             vars:
-              dest_file_name: "artefacts/content-player/content-player-{{ player_version_number }}.zip"
+              gcp_path: "artefacts/content-player/content-player-{{ player_version_number }}.zip"
               local_file_or_folder_path: "{{ source_file_name }}"
           tags:
             - preview
+
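+        # Each plugin named (one per line) in the plugins_to_delete_and_upload
+        # file is deleted and re-uploaded fire-and-forget; gcloud auth is done
+        # once around the loop because the *-no-poll task files skip it.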
+        - block:
+          - name: Authenticate to gcloud
+            include_role:
+              name: gcp-cloud-storage
+              tasks_from: gcloud-auth.yml
+
+          - name: delete and re-upload plugins
+            include_role:
+              name: gcp-cloud-storage
+              tasks_from: "{{ item[0] }}"
+            vars:
+              file_delete_pattern: "content-plugins/{{ item[1] }}/*"
+              gcp_path: "content-plugins/{{ item[1] }}"
+              local_file_or_folder_path: "{{ source_folder }}/{{ item[1] }}"
+            with_nested:
+              - ['delete-batch-no-poll.yml', 'upload-batch-no-poll.yml']
+              - "{{ lookup('file', plugins_to_delete_and_upload).split('\n') }}"
+
+          - name: Revoke gcloud access
+            include_role:
+              name: gcp-cloud-storage
+              tasks_from: gcloud-revoke.yml
+          tags:
+            - plugins
       when: cloud_service_provider == "gcloud"
 
 ################################### AWS tasks #########################
@@ -142,10 +161,10 @@
       block:
         - name: set common aws variables
           set_fact:
-            aws_default_region: "{{ aws_region }}"
-            s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-            aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-            aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
+            aws_default_region: "{{ cloud_public_storage_region }}"
+            s3_bucket_name: "{{ cloud_storage_content_bucketname }}"
+            aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+            aws_secret_access_key: "{{ cloud_public_storage_secret }}"
           tags:
             - always
       
@@ -155,7 +174,7 @@
               name: aws-cloud-storage
               tasks_from: delete-folder.yml
           vars:
-              s3_path: "{{ plugin_storage }}/{{ folder_name }}"
+              s3_path: "{{ folder_name }}"
           tags:
             - content-editor
             - collection-editor
@@ -168,7 +187,7 @@
               name: aws-cloud-storage
               tasks_from: upload-folder.yml
             vars:
-              s3_path: "{{ plugin_storage }}/{{ folder_name }}"
+              s3_path: "{{ folder_name }}"
               local_file_or_folder_path: "{{ source_name }}"
           tags:
             - content-editor 
@@ -184,14 +203,14 @@
               name: aws-cloud-storage
               tasks_from: upload.yml
             vars:
-              s3_path: "{{ plugin_storage }}/artefacts/content-player/content-player-{{ player_version_number }}.zip"
+              s3_path: "artefacts/content-player/content-player-{{ player_version_number }}.zip"
               local_file_or_folder_path: "{{ source_file_name }}"
           tags:
             - preview
       
         - block:
           - name:  run the s3_copy.sh script
-            shell: "bash {{ s3_file_path }} {{ plugin_storage }} {{ source_file }} {{ aws_public_s3_bucket_name }}"
+            shell: "bash {{ s3_file_path }} {{ source_file }} {{ cloud_public_storage_accountname }}"
             async: 3600
             poll: 10
             environment:
@@ -200,4 +219,4 @@
               AWS_SECRET_ACCESS_KEY: "{{ aws_secret_access_key }}"
           tags:
             - plugins
-      when: cloud_service_provider == "aws"
\ No newline at end of file
+      when: cloud_service_provider == "aws"
diff --git a/ansible/desktop-faq-upload.yml b/ansible/desktop-faq-upload.yml
index 911153576b40293e1adfec110f21278a083a6e8e..3683202043b12cfb4f53c4f9846c9afd5b552448 100644
--- a/ansible/desktop-faq-upload.yml
+++ b/ansible/desktop-faq-upload.yml
@@ -6,7 +6,7 @@
       block:
         - name: set common azure variables
           set_fact:
-            blob_container_name: "{{ upload_storage }}"
+            blob_container_name: ""
             blob_file_name: "{{ destination_path }}"
             blob_container_folder_path: "/{{ destination_path }}"
             local_file_or_folder_path: "{{ playbook_dir }}/../{{ src_file_path }}"
@@ -20,8 +20,9 @@
               tasks_from: blob-upload.yml
             vars:
               container_public_access: "container"
-              storage_account_name: "{{ azure_public_storage_account_name }}"
-              storage_account_key: "{{ azure_public_storage_account_key }}"
+              blob_container_name: "{{ cloud_storage_public_bucketname }}"
+              storage_account_name: "{{ cloud_public_storage_accountname }}"
+              storage_account_key: "{{ cloud_public_storage_secret }}"
           tags:
             - upload-desktop-faq
       
@@ -32,32 +33,58 @@
               tasks_from: blob-upload.yml
             vars:
               container_public_access: "off"
-              storage_account_name: "{{ azure_private_storage_account_name }}"
-              storage_account_key: "{{ azure_private_storage_account_key }}"
+              blob_container_name: "{{ cloud_storage_label_bucketname }}"
+              storage_account_name: "{{ cloud_private_storage_accountname }}"
+              storage_account_key: "{{ cloud_private_storage_secret }}"
           tags:
             - upload-label
       
         - block:
-          - name: upload batch of files to azure storage
+          - name: upload batch of files to azure storage - chatbot
             include_role:
               name: azure-cloud-storage
               tasks_from: blob-upload-batch.yml
             vars:
               container_public_access: "container"
-              storage_account_name: "{{ azure_public_storage_account_name }}"
-              storage_account_key: "{{ azure_public_storage_account_key }}"
+              blob_container_name: "{{ cloud_storage_chatbot_bucketname }}"
+              storage_account_name: "{{ cloud_public_storage_accountname }}"
+              storage_account_key: "{{ cloud_public_storage_secret }}"
           tags:
             - upload-chatbot-config
-            - upload-batch
+
+        - block:
+          - name: upload batch of files to azure storage - csv-template
+            include_role:
+              name: azure-cloud-storage
+              tasks_from: blob-upload-batch.yml
+            vars:
+              container_public_access: "container"
+              blob_container_name: "{{ cloud_storage_sourcing_bucketname }}"
+              storage_account_name: "{{ cloud_public_storage_accountname }}"
+              storage_account_key: "{{ cloud_public_storage_secret }}"
+          tags:
+            - upload-csv-template
+
+        - block:
+          - name: upload batch of files to azure storage - discussion-ui
+            include_role:
+              name: azure-cloud-storage
+              tasks_from: blob-upload-batch.yml
+            vars:
+              container_public_access: "container"
+              blob_container_name: "{{ cloud_storage_discussionui_bucketname }}"
+              storage_account_name: "{{ cloud_public_storage_accountname }}"
+              storage_account_key: "{{ cloud_public_storage_secret }}"
+          tags:
+            - upload-discussion-ui
       when: cloud_service_provider == "azure"
 
+### GCP tasks ###
     - name: this block consists of tasks related to gcloud storage
       block:
         - name: set common gcloud variables
           set_fact:
-            dest_folder_name: "{{ upload_storage }}"
-            dest_file_name: "{{ destination_path }}"
-            dest_folder_path: "{{ destination_path }}"
+            gcp_path: "{{ destination_path }}"
             local_file_or_folder_path: "{{ playbook_dir }}/../{{ src_file_path }}"
           tags:
             - always
@@ -68,7 +95,7 @@
               name: gcp-cloud-storage
               tasks_from: upload.yml
             vars:
-              gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
+              gcp_bucket_name: "{{ cloud_storage_public_bucketname }}"
           tags:
             - upload-desktop-faq
       
@@ -78,20 +105,39 @@
               name: gcp-cloud-storage
               tasks_from: upload.yml
             vars:
-              gcp_bucket_name: "{{ gcloud_private_bucket_name }}"
+              gcp_bucket_name: "{{ cloud_storage_label_bucketname }}"
           tags:
             - upload-label
 
         - block:
-          - name: upload batch of files to gcloud storage
+          - name: upload batch of files to gcloud storage - chatbot
             include_role:
               name: gcp-cloud-storage
               tasks_from: upload-batch.yml
             vars:
-              gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
+              gcp_bucket_name: "{{ cloud_storage_chatbot_bucketname }}"
           tags:
             - upload-chatbot-config
-            - upload-batch
+
+        - block:
+          - name: upload batch of files to gcloud storage - csv-template
+            include_role:
+              name: gcp-cloud-storage
+              tasks_from: upload-batch.yml
+            vars:
+              gcp_bucket_name: "{{ cloud_storage_sourcing_bucketname }}"
+          tags:
+            - upload-csv-template
+
+        - block:
+          - name: upload batch of files to gcloud storage - discussion-ui
+            include_role:
+              name: gcp-cloud-storage
+              tasks_from: upload-batch.yml
+            vars:
+              gcp_bucket_name: "{{ cloud_storage_discussionui_bucketname }}"
+          tags:
+            - upload-discussion-ui
       when: cloud_service_provider == "gcloud"
 
 ######################## AWS tasks #########################################
@@ -100,9 +146,9 @@
       block:
         - name: set common aws variables
           set_fact:
-            aws_default_region: "{{ aws_region }}"
+            aws_default_region: "{{ cloud_public_storage_region }}"
             local_file_or_folder_path: "{{ playbook_dir }}/../{{ src_file_path }}"
-            s3_path: "{{ upload_storage }}/{{ destination_path }}"
+            s3_path: "{{ destination_path }}"
           tags:
             - always
       
@@ -112,9 +158,9 @@
               name: aws-cloud-storage
               tasks_from: upload.yml
             vars:
-              s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-              aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-              aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
+              s3_bucket_name: "{{ cloud_storage_public_bucketname }}"
+              aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+              aws_secret_access_key: "{{ cloud_public_storage_secret }}"
           tags:
             - upload-desktop-faq
       
@@ -124,23 +170,45 @@
               name: aws-cloud-storage
               tasks_from: upload.yml
             vars:
-              s3_bucket_name: "{{ aws_private_s3_bucket_name }}"
-              aws_access_key_id: "{{ aws_private_bucket_access_key }}"
-              aws_secret_access_key: "{{ aws_private_bucket_secret_access_key }}"
+              s3_bucket_name: "{{ cloud_storage_label_bucketname }}"
+              aws_access_key_id: "{{ cloud_private_storage_accountname }}"
+              aws_secret_access_key: "{{ cloud_private_storage_secret }}"
           tags:
             - upload-label
       
         - block:
-          - name: upload folder to aws s3
+          - name: upload folder to aws s3 - chatbot
             include_role:
               name: aws-cloud-storage
               tasks_from: upload-folder.yml
             vars:
-              s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-              aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-              aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
+              s3_bucket_name: "{{ cloud_storage_chatbot_bucketname }}"
+              aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+              aws_secret_access_key: "{{ cloud_public_storage_secret }}"
           tags:
             - upload-chatbot-config
-            - upload-batch
-      when: cloud_service_provider == "aws"    
-      
\ No newline at end of file
+
+        - block:
+          - name: upload folder to aws s3 - csv-template
+            include_role:
+              name: aws-cloud-storage
+              tasks_from: upload-folder.yml
+            vars:
+              s3_bucket_name: "{{ cloud_storage_sourcing_bucketname }}"
+              aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+              aws_secret_access_key: "{{ cloud_public_storage_secret }}"
+          tags:
+            - upload-csv-template
+
+        - block:
+          - name: upload folder to aws s3 - discussion-ui
+            include_role:
+              name: aws-cloud-storage
+              tasks_from: upload-folder.yml
+            vars:
+              s3_bucket_name: "{{ cloud_storage_discussionui_bucketname }}"
+              aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+              aws_secret_access_key: "{{ cloud_public_storage_secret }}"
+          tags:
+            - upload-discussion-ui
+      when: cloud_service_provider == "aws"
diff --git a/ansible/dial_upload-schema.yml b/ansible/dial_upload-schema.yml
index f046e6346294acfc3db5c7ed7b500c2d98c83470..757a80f6e51b9cfbd879e20c615528542aac74fd 100644
--- a/ansible/dial_upload-schema.yml
+++ b/ansible/dial_upload-schema.yml
@@ -2,13 +2,6 @@
   gather_facts: no
   vars_files:
     - "{{inventory_dir}}/secrets.yml"
-  # The vars: section is added for the below reason
-  # 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-  # 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-  #    or other default files and just assign the value to the newly introduced common variable 
-  # 3. After few releases, we will remove the older variables and use only the new variables across the repos
-  vars:
-    dial_plugin_storage: "{{ dial_plugin_container_name }}"
   tasks:
     - name: Create directories
       file:
@@ -31,12 +24,12 @@
         name: azure-cloud-storage
         tasks_from: blob-upload-batch.yml
       vars:
-        blob_container_name: "{{ dial_plugin_storage }}"
+        blob_container_name: "{{ cloud_storage_dial_bucketname }}"
         container_public_access: "blob"
         blob_container_folder_path: "/schemas/local"
         local_file_or_folder_path: "dial_schema_template_files"
-        storage_account_name: "{{ azure_public_storage_account_name }}"
-        storage_account_key: "{{ azure_public_storage_account_key }}"
+        storage_account_name: "{{ cloud_public_storage_accountname }}"
+        storage_account_key: "{{ cloud_public_storage_secret }}"
       when: cloud_service_provider == "azure"
     
     - name: upload batch of files to aws s3
@@ -44,12 +37,12 @@
         name: aws-cloud-storage
         tasks_from: upload-folder.yml
       vars:
-        s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-        aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-        aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
-        aws_default_region: "{{ aws_region }}"
+        s3_bucket_name: "{{ cloud_storage_dial_bucketname }}"
+        aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+        aws_secret_access_key: "{{ cloud_public_storage_secret }}"
+        aws_default_region: "{{ cloud_public_storage_region }}"
         local_file_or_folder_path: "dial_schema_template_files"
-        s3_path: "{{ dial_plugin_storage }}/schemas/local"
+        s3_path: "schemas/local"
       when: cloud_service_provider == "aws"
 
     - name: upload batch of files to gcloud storage
@@ -57,9 +50,8 @@
               name: gcp-cloud-storage
               tasks_from: upload-batch.yml
       vars:
-        dest_folder_name: "{{ dial_plugin_storage }}"
-        dest_folder_path: "schemas/local"
+        gcp_bucket_name: "{{ cloud_storage_dial_bucketname }}"
+        gcp_path: "schemas/local"
         local_file_or_folder_path: "dial_schema_template_files"
-        gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
       when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/inventory/env/group_vars/all.yml b/ansible/inventory/env/group_vars/all.yml
index 1aaa166d5c4869442ab6e7d9c8f3a077182ee85e..8dfdd8a43db4177c99a3542f86faa220a458f064 100644
--- a/ansible/inventory/env/group_vars/all.yml
+++ b/ansible/inventory/env/group_vars/all.yml
@@ -128,10 +128,13 @@ cassandra_version: '3.9'
 cassandra_port: 9042
 cassandra_rpc_address: 0.0.0.0
 cassandra_restore_dir: "/home/{{ ansible_ssh_user }}/"
-cassandra_backup_azure_container_name: cassandra-backup
 cassandra_backup_dir: /data/cassandra/backup
 ### Release 5.0.0 ###
 cassandra_multi_dc_enabled: false
+# Release-5.0.1
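+# Pass-through placeholder; the effective value is expected to come from a
+# higher-precedence definition such as the inventory or secrets file.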
+cloud_storage_base_url: "{{ cloud_storage_base_url }}"
 
 keycloak_realm: sunbird
 sunbird_content_player_url: "http://kong:8000/"
diff --git a/ansible/kill_spark_jobs.yaml b/ansible/kill_spark_jobs.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..01c01c5bcf73e697b9acedef65d8b2d55fead320
--- /dev/null
+++ b/ansible/kill_spark_jobs.yaml
@@ -0,0 +1,14 @@
+---
+- hosts: spark
+  become: yes
+  tasks:
+  - name: get pids of job manager which may be orphaned
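+    # The [j]ob bracket pattern prevents grep from matching its own entry in
+    # the ps output, so the grep process is never picked up for killing.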
+    shell: ps -ef | grep '[j]ob.' | awk '{print $2}'
+    register: pids_of_jobmanager
+
+  - name: kill the orphan job manager pids
+    shell: "kill -9 {{ item | int }}"
+    with_items:
+    - "{{ pids_of_jobmanager.stdout_lines }}"
diff --git a/ansible/kp_upload-schema.yml b/ansible/kp_upload-schema.yml
index aecdab077a127ae98adb962434c06aa86ab956ec..d12b74433dc7f9cda413cb583f97895f3815dfd7 100644
--- a/ansible/kp_upload-schema.yml
+++ b/ansible/kp_upload-schema.yml
@@ -2,25 +2,18 @@
   gather_facts: no
   vars_files:
     - "{{inventory_dir}}/secrets.yml"
-  # The vars: section is added for the below reason
-  # 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-  # 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-  #    or other default files and just assign the value to the newly introduced common variable 
-  # 3. After few releases, we will remove the older variables and use only the new variables across the repos
-  vars:
-    plugin_storage: "{{ plugin_container_name }}"
   tasks:
     - name: upload batch of files to azure storage
       include_role:
         name: azure-cloud-storage
         tasks_from: blob-upload-batch.yml
       vars:
-        blob_container_name: "{{ plugin_storage }}"
+        blob_container_name: "{{ cloud_storage_content_bucketname }}"
         container_public_access: "container"
         blob_container_folder_path: "/schemas/local"
         local_file_or_folder_path: "{{ source_name }}"
-        storage_account_name: "{{ azure_public_storage_account_name }}"
-        storage_account_key: "{{ azure_public_storage_account_key }}"
+        storage_account_name: "{{ cloud_public_storage_accountname }}"
+        storage_account_key: "{{ cloud_public_storage_secret }}"
       when: cloud_service_provider == "azure"
 
     - name: upload batch of files to aws s3
@@ -28,12 +21,12 @@
         name: aws-cloud-storage
         tasks_from: upload-folder.yml
       vars:
-        s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-        aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-        aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
-        aws_default_region: "{{ aws_region }}"
+        s3_bucket_name: "{{ cloud_storage_content_bucketname }}"
+        aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+        aws_secret_access_key: "{{ cloud_public_storage_secret }}"
+        aws_default_region: "{{ cloud_public_storage_region }}"
         local_file_or_folder_path: "{{ source_name }}"
-        s3_path: "{{ plugin_storage }}/schemas/local"
+        s3_path: "schemas/local"
       when: cloud_service_provider == "aws"
       
     - name: upload batch of files to gcloud storage
@@ -41,8 +34,7 @@
           name: gcp-cloud-storage
           tasks_from: upload-batch.yml
       vars:
-        dest_folder_name: "{{ plugin_storage }}"
-        dest_folder_path: "schemas/local"
+        gcp_bucket_name: "{{ cloud_storage_content_bucketname }}"
+        gcp_path: "{{ schemas/local"
         local_file_or_folder_path: "{{ source_name }}"
-        gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
       when: cloud_service_provider == "gcloud"  
diff --git a/ansible/plugins.yml b/ansible/plugins.yml
index ab32d9f756f8e65006ff012ccb1bd64a4f998292..fa5967b462c144ca5bc224bf98c4eb03dabe67c9 100644
--- a/ansible/plugins.yml
+++ b/ansible/plugins.yml
@@ -20,8 +20,8 @@
           blob_delete_pattern: "content-plugins/{{ plugins_name }}"
           blob_container_folder_path: "/content-plugins/{{ plugins_name }}"
           local_file_or_folder_path: "{{ source_file }}"
-          storage_account_name: "{{ azure_public_storage_account_name }}"
-          storage_account_key: "{{ azure_public_storage_account_key }}"
+          storage_account_name: "{{ cloud_public_storage_accountname }}"
+          storage_account_key: "{{ cloud_public_storage_secret }}"
     
       - name: delete batch of files from azure storage
         include_role:
@@ -34,14 +34,14 @@
           tasks_from: blob-upload-batch.yml
     when: cloud_service_provider == "azure"
 
+### GCP tasks ###
   - name: this block consists of tasks related to gcloud storage
     block:
         - name: set common gcloud variables
           set_fact:
-            dest_folder_name: "{{ plugin_storage }}"
-            gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
+            gcp_bucket_name: "{{ cloud_storage_content_bucketname }}"
+            gcp_path: "/content-plugins/{{ plugins_name }}"
             file_delete_pattern: "content-plugins/{{ plugins_name }}"
-            dest_folder_path: "/content-plugins/{{ plugins_name }}"
             local_file_or_folder_path: "{{ source_file }}"
 
         - name: delete files and folders from gcloud storage
diff --git a/ansible/roles/azure-cloud-storage/tasks/delete-using-azcopy.yml b/ansible/roles/azure-cloud-storage/tasks/delete-using-azcopy.yml
index 236169e86c80877f927e548a758755991bf1332b..196de9c9b3901299a8315052438800042082d348 100644
--- a/ansible/roles/azure-cloud-storage/tasks/delete-using-azcopy.yml
+++ b/ansible/roles/azure-cloud-storage/tasks/delete-using-azcopy.yml
@@ -1,6 +1,19 @@
 ---
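+# Generate a short-lived (1 hour) container-scoped SAS on the fly so a
+# pre-provisioned account-level SAS (storage_account_sas_token) is no longer
+# needed; the leading "?" lets the token be appended directly to the URL.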
+- name: generate SAS token for azcopy
+  shell: |
+   sas_expiry=`date -u -d "1 hour" '+%Y-%m-%dT%H:%MZ'`
+   sas_token=?`az storage container generate-sas -n {{ blob_container_name }} --account-name {{ storage_account_name }} --account-key {{ storage_account_key }} --https-only --permissions dlrw --expiry $sas_expiry -o tsv`
+   echo $sas_token
+  register: sas_token
+
+- set_fact:
+   container_sas_token: "{{ sas_token.stdout }}"
+
 - name: delete files and folders from azure storage using azcopy
-  shell: "azcopy rm 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ storage_account_sas_token }}' --recursive"
+  shell: "azcopy rm 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ container_sas_token }}' --recursive"
   environment:
     AZCOPY_CONCURRENT_FILES: "10"
   async: 10800
diff --git a/ansible/roles/azure-cloud-storage/tasks/upload-using-azcopy.yml b/ansible/roles/azure-cloud-storage/tasks/upload-using-azcopy.yml
index affbc8c00243db0b694592cd2b1c163b91e6c524..95da584c9bc2d4892c62ee9a91c5db0256303842 100644
--- a/ansible/roles/azure-cloud-storage/tasks/upload-using-azcopy.yml
+++ b/ansible/roles/azure-cloud-storage/tasks/upload-using-azcopy.yml
@@ -1,4 +1,14 @@
 ---
+- name: generate SAS token for azcopy
+  shell: |
+   sas_expiry=`date -u -d "1 hour" '+%Y-%m-%dT%H:%MZ'`
+   sas_token=?`az storage container generate-sas -n {{ blob_container_name }} --account-name {{ storage_account_name }} --account-key {{ storage_account_key }} --https-only --permissions dlrw --expiry $sas_expiry -o tsv`
+   echo $sas_token
+  register: sas_token
+
+- set_fact:
+   container_sas_token: "{{ sas_token.stdout }}"
+
 - name: create container in azure storage if it doesn't exist
   include_role:
     name: azure-cloud-storage
@@ -6,7 +16,7 @@
   when: create_container == True
 
 - name: upload files and folders to azure storage using azcopy
-  shell: "azcopy copy {{ local_file_or_folder_path }} 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ storage_account_sas_token }}' --recursive"
+  shell: "azcopy copy {{ local_file_or_folder_path }} 'https://{{ storage_account_name }}.blob.core.windows.net/{{ blob_container_name }}{{ blob_container_folder_path }}{{ container_sas_token }}' --recursive"
   environment:
     AZCOPY_CONCURRENT_FILES: "10"
   async: 10800
diff --git a/ansible/roles/cassandra-backup/defaults/main.yml b/ansible/roles/cassandra-backup/defaults/main.yml
index 139fd1d81028f3e96304458b4aa4a12e536286b9..4481570cc6e7366d127cb8b2238c4bd99565c624 100644
--- a/ansible/roles/cassandra-backup/defaults/main.yml
+++ b/ansible/roles/cassandra-backup/defaults/main.yml
@@ -1,10 +1,4 @@
 cassandra_root_dir: '/etc/cassandra'
 data_dir: '/var/lib/cassandra/data'
-cassandra_backup_azure_container_name: core-cassandra
-
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-cassandra_backup_storage: "{{ cassandra_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_cassandrabackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_cassandrabackup_foldername: 'cassandra-backup'
diff --git a/ansible/roles/cassandra-backup/tasks/main.yml b/ansible/roles/cassandra-backup/tasks/main.yml
index 507aeb190b20aab36e1d41176594142e9159d931..0e5ae87477d894572f8f489f5ad8099a5e939d3a 100755
--- a/ansible/roles/cassandra-backup/tasks/main.yml
+++ b/ansible/roles/cassandra-backup/tasks/main.yml
@@ -37,12 +37,12 @@
     name: azure-cloud-storage
     tasks_from: upload-using-azcopy.yml
   vars:
-    blob_container_name: "{{ cassandra_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_cassandrabackup_foldername }}"
     container_public_access: "off"
     blob_container_folder_path: ""
     local_file_or_folder_path: "/data/cassandra/backup/{{ cassandra_backup_folder_name }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_sas_token: "{{ azure_management_storage_account_sas }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload backup to S3
@@ -51,11 +51,11 @@
     tasks_from: upload-folder.yml
   vars:
     local_file_or_folder_path: "/data/cassandra/backup/{{ cassandra_backup_folder_name }}"
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    s3_path: "{{ cassandra_backup_storage }}"
-    aws_default_region: "{{ aws_region }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
+    s3_bucket_name: "{{ cloud_storage_cassandrabackup_bucketname }}"
+    s3_path: "{{ cloud_storage_cassandrabackup_foldername }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "aws"
   
 - name: upload file to gcloud storage
@@ -63,9 +63,8 @@
     name: gcp-cloud-storage
     tasks_from: upload-batch.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ cassandra_backup_storage }}"
-    dest_folder_path: ""
+    gcp_bucket_name: "{{ cloud_storage_cassandrabackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_cassandrabackup_foldername }}"
     local_file_or_folder_path: "/data/cassandra/backup/{{ cassandra_backup_folder_name }}"
   when: cloud_service_provider == "gcloud"
   
diff --git a/ansible/roles/cassandra-restore/defaults/main.yml b/ansible/roles/cassandra-restore/defaults/main.yml
index 4a4828144e85c4217e7035e5fb2035e2a5590eec..9ac0c38f957323d821bd5f1d4a361f00f83aae6c 100644
--- a/ansible/roles/cassandra-restore/defaults/main.yml
+++ b/ansible/roles/cassandra-restore/defaults/main.yml
@@ -1,8 +1,4 @@
 user_home: "/home/{{ ansible_ssh_user }}/"
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-cassandra_backup_storage: "{{ cassandra_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_cassandrabackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_cassandrabackup_foldername: 'cassandra-backup'
diff --git a/ansible/roles/cassandra-restore/tasks/main.yml b/ansible/roles/cassandra-restore/tasks/main.yml
index 8a47ab708921f896d3a12935588f07917e4cda4a..4bd8c05991842de54907552e789bb1dc2478bb25 100755
--- a/ansible/roles/cassandra-restore/tasks/main.yml
+++ b/ansible/roles/cassandra-restore/tasks/main.yml
@@ -11,11 +11,11 @@
     name: azure-cloud-storage
     tasks_from: blob-download.yml
   vars:
-    blob_container_name: "{{ cassandra_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_cassandrabackup_foldername }}"
     blob_file_name: "{{ cassandra_restore_gzip_file_name }}"
     local_file_or_folder_path: "{{ cassandra_restore_gzip_file_path  }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: download a file from aws s3
@@ -24,12 +24,12 @@
     name: aws-cloud-storage
     tasks_from: download.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_cassandrabackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ cassandra_restore_gzip_file_path  }}"
-    s3_path: "{{ cassandra_backup_storage }}/{{ cassandra_restore_gzip_file_name }}"
+    s3_path: "{{ cloud_storage_cassandrabackup_foldername }}/{{ cassandra_restore_gzip_file_name }}"
   when: cloud_service_provider == "aws"  
    
 - name: download file from gcloud storage
@@ -37,9 +37,8 @@
     name: gcp-cloud-storage
     tasks_from: download.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ cassandra_backup_storage }}"
-    dest_file_name: "{{ cassandra_restore_gzip_file_name }}"
+    gcp_bucket_name: "{{ cloud_storage_cassandrabackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_cassandrabackup_foldername }}/{{ cassandra_restore_gzip_file_name }}"
     local_file_or_folder_path: "{{ cassandra_restore_gzip_file_path }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/cert-templates/defaults/main.yml b/ansible/roles/cert-templates/defaults/main.yml
index c621d6ddb89a87757294da46874054d1ffe3c9cd..c8710dd9d99e19968ca06bc2bf96a9639d811ac8 100644
--- a/ansible/roles/cert-templates/defaults/main.yml
+++ b/ansible/roles/cert-templates/defaults/main.yml
@@ -2,10 +2,3 @@ certs_badge_upload_retry_count: 3
 certs_badge_criteria: ""
 certs_badge_batch_id: ""
 certs_badge_key_id: ""
-
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-cert_service_storage: "{{ cert_service_container_name }}"
\ No newline at end of file
diff --git a/ansible/roles/cert-templates/tasks/main.yml b/ansible/roles/cert-templates/tasks/main.yml
index 78f1f769b3f86323ee12dd39c81b3a2c356f8272..0caf2b1bfe7cbe940170795e2f77b3ff746c2e22 100644
--- a/ansible/roles/cert-templates/tasks/main.yml
+++ b/ansible/roles/cert-templates/tasks/main.yml
@@ -36,12 +36,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload-batch.yml
   vars:
-    blob_container_name: "{{ cert_service_storage }}"
+    blob_container_name: "{{ cloud_storage_certservice_bucketname }}"
     container_public_access: "off"
     blob_container_folder_path: ""
     local_file_or_folder_path: "{{ cert_location }}/cert-templates/certUtilScripts/out"
-    storage_account_name: "{{ azure_private_storage_account_name }}"
-    storage_account_key: "{{ azure_private_storage_account_key }}"
+    storage_account_name: "{{ cloud_private_storage_accountname }}"
+    storage_account_key: "{{ cloud_private_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload batch of files to aws s3
@@ -49,12 +49,12 @@
     name: aws-cloud-storage
     tasks_from: upload-folder.yml
   vars:
-    s3_bucket_name: "{{ aws_private_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_private_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_private_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_certservice_bucketname }}"
+    aws_access_key_id: "{{ cloud_private_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_private_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ cert_location }}/cert-templates/certUtilScripts/out"
-    s3_path: "{{ cert_service_storage }}"
+    s3_path: ""
   when: cloud_service_provider == "aws"
   
 - name: upload batch of files to gcloud storage
@@ -62,8 +62,7 @@
     name: gcp-cloud-storage
     tasks_from: upload-batch.yml
   vars:
-    dest_folder_name: "{{ cert_service_storage }}"
-    dest_folder_path: ""
+    gcp_bucket_name: "{{ cloud_storage_certservice_bucketname }}"
+    gcp_path: ""
     local_file_or_folder_path: "{{ cert_location }}/cert-templates/certUtilScripts/out"
-    gcp_bucket_name: "{{ gcloud_private_bucket_name }}"
   when: cloud_service_provider == "gcloud"
diff --git a/ansible/roles/desktop-deploy/defaults/main.yml b/ansible/roles/desktop-deploy/defaults/main.yml
index 3010db2349018644d61d0d1410396dbd2f506b0b..2cff6657c72d57e5f811a5ce09627c7f0e572354 100644
--- a/ansible/roles/desktop-deploy/defaults/main.yml
+++ b/ansible/roles/desktop-deploy/defaults/main.yml
@@ -1,10 +1,3 @@
 ---
 time: "YEAR-MONTH-DATE-HOUR-MINUTE-SECOND-INSTALLERTYPE"
 offline_installer_container_name: "{{env}}-offlineinstaller"
-
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-offline_installer_storage: "{{ offline_installer_container_name }}"
\ No newline at end of file
diff --git a/ansible/roles/desktop-deploy/tasks/main.yml b/ansible/roles/desktop-deploy/tasks/main.yml
index 09c41300efbd44cfc6dab09d93b07ecf15de1a84..ba077b778f9bf7292336221d0cb46fb3f4ac5448 100644
--- a/ansible/roles/desktop-deploy/tasks/main.yml
+++ b/ansible/roles/desktop-deploy/tasks/main.yml
@@ -53,10 +53,10 @@
   block:
     - name: set common azure variables
       set_fact:
-        blob_container_name: "{{ offline_installer_storage }}"
+        blob_container_name: "{{ cloud_storage_offlineinstaller_bucketname }}"
         container_public_access: "blob"
-        storage_account_name: "{{ azure_public_storage_account_name }}"
-        storage_account_key: "{{ azure_public_storage_account_key }}"
+        storage_account_name: "{{ cloud_public_storage_accountname }}"
+        storage_account_key: "{{ cloud_public_storage_secret }}"
 
     - name: upload batch of files to azure storage
       include_role:
@@ -81,10 +81,10 @@
   block:
     - name: set common aws variables
       set_fact:
-        s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-        aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-        aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
-        aws_default_region: "{{ aws_region }}"
+        s3_bucket_name: "{{ cloud_storage_offlineinstaller_bucketname }}"
+        aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+        aws_secret_access_key: "{{ cloud_public_storage_secret }}"
+        aws_default_region: "{{ cloud_public_storage_region }}"
         
     - name: upload batch of files to aws s3
       include_role:
@@ -103,19 +103,19 @@
         local_file_or_folder_path: "{{ offline_repo_location }}/desktop_uploader_assets/{{ time }}/"
   when: cloud_service_provider == "aws"
   
+### GCP Tasks ###
 - name: this block consists of tasks related to gcloud storage
   block:
     - name: set common gcloud variables
       set_fact:
-        dest_folder_name: "{{ offline_installer_storage }}"
-        gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
+        gcp_bucket_name: "{{ cloud_storage_offlineinstaller_bucketname }}"
 
     - name: upload batch of files to gcloud storage
       include_role:
         name: gcp-cloud-storage
         tasks_from: upload-batch.yml
       vars:
-        dest_folder_path: ""
+        gcp_path: ""
         local_file_or_folder_path: "{{ offline_repo_location }}/desktop_uploader_assets"
 
     - name: upload batch of files to gcloud storage
@@ -123,6 +123,6 @@
         name: gcp-cloud-storage
         tasks_from: upload-batch.yml
       vars:
-        dest_folder_path: "latest"
+        gcp_path: "latest"
         local_file_or_folder_path: "{{ offline_repo_location }}/desktop_uploader_assets/{{ time }}/"
   when: cloud_service_provider == "gcloud"
diff --git a/ansible/roles/es-azure-snapshot/tasks/main.yml b/ansible/roles/es-azure-snapshot/tasks/main.yml
index 8ce0fcd267e92fc8dc79a5a0662ce2f9380081a5..23be535db9c8ecb98f6ec76b5d87b39a2f3eb2c9 100644
--- a/ansible/roles/es-azure-snapshot/tasks/main.yml
+++ b/ansible/roles/es-azure-snapshot/tasks/main.yml
@@ -13,8 +13,8 @@
   vars:
     blob_container_name: "{{ es_backup_storage }}"
     container_public_access: "off"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
 
 - name: Create Azure Repository
   uri:
diff --git a/ansible/roles/es6/tasks/plugins/repository-azure.yml b/ansible/roles/es6/tasks/plugins/repository-azure.yml
index 170a84000eb4d55d5090d58e2ede8a077c857684..dd7fcc3a20cbe0c20abb15db2e55db31b940d9a5 100644
--- a/ansible/roles/es6/tasks/plugins/repository-azure.yml
+++ b/ansible/roles/es6/tasks/plugins/repository-azure.yml
@@ -1,7 +1,7 @@
 ---
 - name: Add default azure account name for backups
   become: yes
-  shell: echo "{{ azure_management_storage_account_name }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.account
+  shell: echo "{{ cloud_management_storage_accountname }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.account
   no_log: True
   environment:
     ES_PATH_CONF: "{{ conf_dir }}"
@@ -9,7 +9,7 @@
 
 - name: Add default azure account key for backups
   become: yes
-  shell: echo "{{ azure_management_storage_account_key }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.key
+  shell: echo "{{ cloud_management_storage_secret }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.key
   no_log: True
   environment:
-    ES_PATH_CONF: "{{ conf_dir }}"
\ No newline at end of file
+    ES_PATH_CONF: "{{ conf_dir }}"
diff --git a/ansible/roles/firebase_deploy/tasks/main.yml b/ansible/roles/firebase_deploy/tasks/main.yml
index 0403ff7dab78b3bc7309dc67a7507acb229dbd11..8ebd281250d82ff26f7127c413a8c7df228383b7 100644
--- a/ansible/roles/firebase_deploy/tasks/main.yml
+++ b/ansible/roles/firebase_deploy/tasks/main.yml
@@ -9,7 +9,7 @@
       src: "uploadToGdrive.sh"
       dest: "./uploadToGdrive.sh"
   - name: Uploading build  to {{ env_name }} firebase
-    shell:  find ../ -maxdepth 1 -iregex ".*{{env_name}}.apk" -exec bash deployToFirebase.sh {} \;
+    shell:  find ../ -maxdepth 1 -iregex ".*{{env_name}}.*\.aab" -exec bash deployToFirebase.sh {} \;
     when: env_name!='production'
   - name: Uploading build to {{ env_name }} Gdrive
     shell: find ../ -maxdepth 1 -iregex ".*[0-9].apk" -exec bash uploadToGdrive.sh -v -r {} \;
diff --git a/ansible/roles/gcp-cloud-storage/defaults/main.yml b/ansible/roles/gcp-cloud-storage/defaults/main.yml
index 086cf9c50ddbf452e7cb87dc5c4be6bc73729874..b0fd847b26f2928aac4870f04ba5b072fd1f1245 100644
--- a/ansible/roles/gcp-cloud-storage/defaults/main.yml
+++ b/ansible/roles/gcp-cloud-storage/defaults/main.yml
@@ -10,8 +10,8 @@ gcp_storage_key_file: ""
 
 # Folder name in GCP bucket
 # Example -
-# dest_folder_name: "my-destination-folder"
-dest_folder_name: ""
+# gcp_path: "my-destination-folder"
+gcp_path: ""
 
 # The delete pattern to delete files and folder
 # Example -
@@ -36,7 +36,7 @@ dest_file_name: ""
 
 # The folder path in gcloud storage to upload the files starting from the root of the bucket
 # This path should start with / if we provide a value for this variable since we are going to append this path as below
-# {{ bucket_name }}{{ dest_folder_name }}
+# {{ gcp_bucket_name }}{{ gcp_path }}
 # The above translates to "my-bucket/my-folder-path"
 # Example -
 # dest_folder_path: "/my-folder/json-files-folder"
diff --git a/ansible/roles/gcp-cloud-storage/tasks/delete-batch-no-poll.yml b/ansible/roles/gcp-cloud-storage/tasks/delete-batch-no-poll.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ca02b8a064b06f81dccd0e35d538a2a933748ef4
--- /dev/null
+++ b/ansible/roles/gcp-cloud-storage/tasks/delete-batch-no-poll.yml
@@ -0,0 +1,8 @@
+---
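+# Fire and forget: async 1800 with poll 0 returns immediately so the caller
+# (e.g. deploy-plugins.yml) can run many deletes in parallel.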
+- name: Delete folder recursively in gcp storage
+  shell: gsutil rm -r "gs://{{ gcp_bucket_name }}/{{ file_delete_pattern }}"
+  async: 1800
+  poll: 0
+
diff --git a/ansible/roles/gcp-cloud-storage/tasks/download.yml b/ansible/roles/gcp-cloud-storage/tasks/download.yml
index c8c6e956ad09bf92daa4d6f283504a97ad03c162..73bf76bb049b39522a37c257a54b009798d0e244 100644
--- a/ansible/roles/gcp-cloud-storage/tasks/download.yml
+++ b/ansible/roles/gcp-cloud-storage/tasks/download.yml
@@ -3,9 +3,9 @@
   include_tasks: gcloud-auth.yml
 
 - name: Download from gcloud storage
-  shell: gsutil cp "gs://{{ gcp_bucket_name }}/{{ dest_folder_name }}/{{ dest_file_name }}" "{{ local_file_or_folder_path }}"
+  shell: gsutil cp "gs://{{ gcp_bucket_name }}/{{ gcp_path }}" "{{ local_file_or_folder_path }}"
   async: 3600
   poll: 10
 
 - name: Revoke gcloud access
-  include_tasks: gcloud-revoke.yml
\ No newline at end of file
+  include_tasks: gcloud-revoke.yml
diff --git a/ansible/roles/gcp-cloud-storage/tasks/upload-batch-no-poll.yml b/ansible/roles/gcp-cloud-storage/tasks/upload-batch-no-poll.yml
new file mode 100644
index 0000000000000000000000000000000000000000..40e9b8a66a96440dac3cd538f10215ba3ccb93e6
--- /dev/null
+++ b/ansible/roles/gcp-cloud-storage/tasks/upload-batch-no-poll.yml
@@ -0,0 +1,5 @@
+---
+- name: Upload files from a local directory to GCP storage
+  shell: gsutil -m cp -r "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ gcp_path }}"
+  async: 1800
+  poll: 0
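+  # Same fire-and-forget pattern as delete-batch-no-poll.yml: gsutil -m copies
+  # the local path recursively to gs://<bucket>/<gcp_path> without the play
+  # waiting on the result; authentication is likewise assumed to exist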
diff --git a/ansible/roles/gcp-cloud-storage/tasks/upload-batch.yml b/ansible/roles/gcp-cloud-storage/tasks/upload-batch.yml
index 49abd5b82263de4ba7b3c72954b388e58c0eb755..dc103969aa8758ea680cf7af802c48e2761a8e08 100644
--- a/ansible/roles/gcp-cloud-storage/tasks/upload-batch.yml
+++ b/ansible/roles/gcp-cloud-storage/tasks/upload-batch.yml
@@ -3,7 +3,7 @@
   include_tasks: gcloud-auth.yml
 
 - name: Upload files from a local directory gcp storage
-  shell: gsutil -m cp -r "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ dest_folder_name }}/{{ dest_folder_path }}"
+  shell: gsutil -m cp -r "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ gcp_path }}"
   async: 3600
   poll: 10
 
diff --git a/ansible/roles/gcp-cloud-storage/tasks/upload.yml b/ansible/roles/gcp-cloud-storage/tasks/upload.yml
index 2f88d9407fb3515a1f8942a5527cfe52b32e1cd8..de766a94c747f78a3608f2a86c8f11d0b11c62c7 100644
--- a/ansible/roles/gcp-cloud-storage/tasks/upload.yml
+++ b/ansible/roles/gcp-cloud-storage/tasks/upload.yml
@@ -3,7 +3,7 @@
   include_tasks: gcloud-auth.yml
 
 - name: Upload to gcloud storage
-  shell: gsutil cp "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ dest_folder_name }}/{{ dest_file_name }}"
+  shell: gsutil cp "{{ local_file_or_folder_path }}" "gs://{{ gcp_bucket_name }}/{{ gcp_path }}"
   async: 3600
   poll: 10
 
diff --git a/ansible/roles/grafana-backup/defaults/main.yml b/ansible/roles/grafana-backup/defaults/main.yml
index fc62843964a2fc17d0ca91db7a04355628a12f61..70bd76ff8291662f37172d4ea5d447f5fc5f5148 100644
--- a/ansible/roles/grafana-backup/defaults/main.yml
+++ b/ansible/roles/grafana-backup/defaults/main.yml
@@ -4,11 +4,6 @@ grafana_data_dir: /var/dockerdata/grafana/grafana.db
 # Override these values in group_vars
 sunbird_management_storage_account_name: 
 sunbird_management_storage_account_key: '<backup_storage_key>'
-grafana_backup_azure_container_name: grafana-backup
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-grafana_backup_storage: "{{ grafana_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_grafanabackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_grafanabackup_foldername: 'grafana-backup'
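+# On Azure the folder name doubles as the blob container; on AWS/GCP it becomes
+# a prefix inside cloud_storage_grafanabackup_bucketname (see tasks/main.yml)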
diff --git a/ansible/roles/grafana-backup/tasks/main.yml b/ansible/roles/grafana-backup/tasks/main.yml
index 2c8520030c63cef912b6a8e59218d4ab26d31b74..90dc3526caae43d848ad4e2ff04274567c113a11 100644
--- a/ansible/roles/grafana-backup/tasks/main.yml
+++ b/ansible/roles/grafana-backup/tasks/main.yml
@@ -24,12 +24,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ grafana_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_grafanabackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ grafana_backup_gzip_file_name }}"
     local_file_or_folder_path: "{{ grafana_backup_gzip_file_path }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -37,12 +37,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_grafanabackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ grafana_backup_gzip_file_path }}"
-    s3_path: "{{ grafana_backup_storage }}/{{ grafana_backup_gzip_file_name }}"
+    s3_path: "{{ cloud_storage_grafanabackup_foldername }}/{{ grafana_backup_gzip_file_name }}"
   when: cloud_service_provider == "aws"
 
 - name: upload file to gcloud storage
@@ -50,9 +50,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ grafana_backup_storage }}"
-    dest_file_name: "{{ grafana_backup_gzip_file_name }}"
+    gcp_bucket_name: "{{ cloud_storage_grafanabackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_grafanabackup_foldername }}/{{ grafana_backup_gzip_file_name }}"
     local_file_or_folder_path: "{{ grafana_backup_gzip_file_path }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/jenkins-backup-upload/defaults/main.yml b/ansible/roles/jenkins-backup-upload/defaults/main.yml
index 40a231d3d5b881c1baeb0558d5093e9e675c4a39..9fd90050bffa1089a99a1c80696618bfbba5cdca 100644
--- a/ansible/roles/jenkins-backup-upload/defaults/main.yml
+++ b/ansible/roles/jenkins-backup-upload/defaults/main.yml
@@ -1,12 +1,7 @@
 jenkins_user: jenkins
 jenkins_group: jenkins
 jenkins_backup_base_dir: /var/lib/jenkins/jenkins-backup
-jenkins_backup_azure_container_name: jenkins-backup
 jenkins_backup_max_delay_in_days: 1
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-jenkins_backup_storage: "{{ jenkins_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_jenkinsbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_jenkinsbackup_foldername: 'jenkins-backup'
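+# Resulting object path, e.g. jenkins-backup/<LATEST_BACKUP_DIR>.zip in the
+# management bucket; on Azure, jenkins-backup is the container itself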
diff --git a/ansible/roles/jenkins-backup-upload/tasks/main.yml b/ansible/roles/jenkins-backup-upload/tasks/main.yml
index a94e57fe4af9e0700289ab29430b66a16f960366..89d8f3e29ccd2ee11674e9f9035b1b1454dead4a 100644
--- a/ansible/roles/jenkins-backup-upload/tasks/main.yml
+++ b/ansible/roles/jenkins-backup-upload/tasks/main.yml
@@ -17,12 +17,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ jenkins_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_jenkinsbackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ LATEST_BACKUP_DIR.stdout }}.zip"
     local_file_or_folder_path: "/tmp/{{ LATEST_BACKUP_DIR.stdout }}.zip"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -30,12 +30,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_jenkinsbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "/tmp/{{ LATEST_BACKUP_DIR.stdout }}.zip"
-    s3_path: "{{ jenkins_backup_storage }}/{{ LATEST_BACKUP_DIR.stdout }}.zip"
+    s3_path: "{{ cloud_storage_jenkinsbackup_foldername }}/{{ LATEST_BACKUP_DIR.stdout }}.zip"
   when: cloud_service_provider == "aws"
   
 - name: upload file to gcloud storage
@@ -43,9 +43,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ jenkins_backup_storage  }}"
-    dest_file_name: "{{ LATEST_BACKUP_DIR.stdout }}.zip"
+    gcp_bucket_name: "{{ cloud_storage_jenkinsbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_jenkinsbackup_foldername }}/{{ LATEST_BACKUP_DIR.stdout }}.zip"
     local_file_or_folder_path: "/tmp/{{ LATEST_BACKUP_DIR.stdout }}.zip"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/kong-api/defaults/main.yml b/ansible/roles/kong-api/defaults/main.yml
index ff4323914e53522cf2aaf2d704fb62f793a367b6..d7589ae22ed60ca5273d0ad27a3e89418a5b1d39 100644
--- a/ansible/roles/kong-api/defaults/main.yml
+++ b/ansible/roles/kong-api/defaults/main.yml
@@ -127,6 +127,7 @@ assessment_prefix: /assessment
 # Service URLs
 knowledge_mw_service_url: "http://knowledge-mw-service:5000"
 learning_service_url: "http://learner-service:9000"
+dial_service_url: "http://dial-service:9000"
 vm_learning_service_url: "http://{{learningservice_ip}}:8080/learning-service"
 telemetry_service_url: "http://telemetry-service:9001"
 player_service_url: "http://player:3000"
@@ -2494,7 +2495,7 @@ kong_apis:
 
 - name: publishContent
   uris: "{{ content_prefix }}/v1/publish"
-  upstream_url: "{{ knowledge_mw_service_url }}/v1/content/publish"
+  upstream_url: "{{ content_service_url }}/content/v3/publish"
   strip_uri: true
   plugins:
   - name: jwt
@@ -9815,3 +9816,188 @@ kong_apis:
   - name: opa-checks
     config.required: true
     config.enabled: true
+
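+# Most routes added below share one plugin stack: jwt + cors for auth, an acl
+# whitelist per consumer group, local per-credential rate limiting, a payload
+# size cap, and opa-checks toggled per route.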
+- name: fetchAllForm
+  uris: "{{ data_service_prefix }}/v1/form/fetchAll"
+  upstream_url: "{{ player_service_url }}/plugin/v1/form/fetchAll"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - formUpdate
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ small_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ medium_request_size_limit }}"
+  - name: opa-checks
+    config.required: false
+    config.enabled: false
+
+- name: releaseDialcodeContentV2
+  uris: "{{ content_prefix }}/v2/dialcode/release"
+  upstream_url: "{{ content_service_url }}/content/v4/dialcode/release"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentUpdate
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
+- name: releaseDialcodeCollection
+  uris: "{{ collection_prefix }}/v1/dialcode/release"
+  upstream_url: "{{ content_service_url }}/collection/v4/dialcode/release"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentUpdate
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
+- name: publishContentV2
+  uris: "{{ content_prefix }}/v2/publish"
+  upstream_url: "{{ content_service_url }}/content/v4/publish"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentAdmin
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
+- name: unlistedPublishContent
+  uris: "{{ content_prefix }}/v1/unlisted/publish"
+  upstream_url: "{{ content_service_url }}/content/v3/unlisted/publish"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentAdmin
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
+- name: unlistedPublishContentV2
+  uris: "{{ content_prefix }}/v2/unlisted/publish"
+  upstream_url: "{{ content_service_url }}/content/v4/unlisted/publish"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentAdmin
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
+- name: publishCollection
+  uris: "{{ collection_prefix }}/v1/publish"
+  upstream_url: "{{ content_service_url }}/collection/v4/publish"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentAdmin
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
+- name: unlistedPublishCollection
+  uris: "{{ collection_prefix }}/v1/unlisted/publish"
+  upstream_url: "{{ content_service_url }}/collection/v4/unlisted/publish"
+  strip_uri: true
+  plugins:
+  - name: jwt
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: acl
+    config.whitelist:
+    - contentAdmin
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: credential
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
+
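+# Unlike the routes above, this one carries no jwt/acl plugin, so it is
+# reachable without a user token and rate limiting falls back to per-IP.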
+- name: readDIALCodesBatchInfo
+  uris: "{{ dialcode_service_prefix }}/v2/read/batch"
+  upstream_url: "{{ dial_service_url }}/dialcode/v4/batch/read"
+  strip_uri: true
+  plugins:
+  - name: cors
+  - "{{ statsd_pulgin }}"
+  - name: rate-limiting
+    config.policy: local
+    config.hour: "{{ medium_rate_limit_per_hour }}"
+    config.limit_by: ip
+  - name: request-size-limiting
+    config.allowed_payload_size: "{{ small_request_size_limit }}"
+  - name: opa-checks
+    config.required: true
+    config.enabled: true
\ No newline at end of file
diff --git a/ansible/roles/log-es6/tasks/plugins/repository-azure.yml b/ansible/roles/log-es6/tasks/plugins/repository-azure.yml
index 9c3b9d3774cbf917bbe32c427fe20c6a5b299637..43d512803f2b61997ad267cc375bca7f4179aef6 100644
--- a/ansible/roles/log-es6/tasks/plugins/repository-azure.yml
+++ b/ansible/roles/log-es6/tasks/plugins/repository-azure.yml
@@ -1,7 +1,7 @@
 ---
 - name: Add default azure account name for backups
   become: yes
-  shell: echo "{{ azure_management_storage_account_name }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.account
+  shell: echo "{{ cloud_management_storage_accountname }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.account
   no_log: True
   environment:
     ES_PATH_CONF: "{{ es_conf_dir }}"
@@ -9,7 +9,7 @@
 
 - name: Add default azure account key for backups
   become: yes
-  shell: echo "{{ azure_management_storage_account_key }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.key
+  shell: echo "{{ cloud_management_storage_secret }}" | {{ es_home }}/bin/elasticsearch-keystore add -f azure.client.default.key
   no_log: True
   environment:
-    ES_PATH_CONF: "{{ es_conf_dir }}"
\ No newline at end of file
+    ES_PATH_CONF: "{{ es_conf_dir }}"
diff --git a/ansible/roles/ml-analytics-service/defaults/main.yml b/ansible/roles/ml-analytics-service/defaults/main.yml
index 99e7526e4b28e052d4b6a70f12b62026cc362afe..d73099451dae492abf5d41491f666d18569dabff 100755
--- a/ansible/roles/ml-analytics-service/defaults/main.yml
+++ b/ansible/roles/ml-analytics-service/defaults/main.yml
@@ -44,10 +44,8 @@ ml_analytics_kafka_survey_druid_topic_name: "{{ env_name }}.ml.survey.druid"
 ml_analytics_observation_log_folder_path: "{{ WORKDIR }}/logs/observation"
 ml_analytics_project_log_folder_path: "{{ WORKDIR }}/logs/project"
 ml_analytics_survey_log_folder_path: "{{ WORKDIR }}/logs/survey"
-ml_analytics_azure_account_name: "{{ cloud_private_storage_accountname }}"
-ml_analytics_azure_container_name: "{{ cloud_storage_telemetry_bucketname }}"
-ml_analytics_observation_azure_blob_path: "observation/status/"
-ml_analytics_project_azure_blob_path: "projects/"
+ml_analytics_observation_cloud_blob_path: "observation/status/"
+ml_analytics_project_cloud_blob_path: "projects/"
 ml_analytics_redis_host: "{{ml_redis_host | default(groups['dp-redis'][0])}}"
 ml_analytics_redis_port: "{{ ml_redis_device_port | default('6379') }}"
 ml_analytics_redis_db_name: "12"
@@ -55,64 +53,64 @@ ml_analytics_project_output_dir: "{{ WORKDIR }}/source/projects/output"
 ml_analytics_observation_status_output_dir: "{{ WORKDIR }}/source/observations/status/output"
 ml_analytics_api_authorization_key: "{{ml_api_auth_token | default('sunbird_api_auth_token')}}"
 ml_analytics_api_access_token: "{{ml_api_access_token | default('ml_core_internal_access_token')}}"
-ml_analytics_druid_observation_status_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type": "azure","uris": ["azure://telemetry-data-store/observation/status/sl_observation_status.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"sl-observation-status","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"updatedAt","format":"auto"},"dimensionsSpec":{"dimensions":["status","entity_externalId","entity_id","entity_type","solution_id","solution_externalId","submission_id","entity_name","solution_name","role_title","school_name","school_code","school_externalId","state_name","state_code","state_externalId","district_name","district_code","district_externalId","block_name","block_code","block_externalId","cluster_name","cluster_code","cluster_externalId","completedDate","channel","parent_channel","program_id","program_externalId","program_name","app_name","user_id","private_program","solution_type","organisation_name","ecm_marked_na","board_name","updatedAt","organisation_id","user_type","observed_school_name","observed_school_id","observed_school_code","observed_state_name","observed_state_id","observed_state_code","observed_district_name","observed_district_id","observed_district_code","observed_block_name","observed_block_id","observed_block_code","observed_cluster_name","observed_cluster_id","observed_cluster_code"]},"metricsSpec":[]}}}'
+ml_analytics_druid_observation_status_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type": "azure","uris": ["azure://telemetry-data-store/observation/status/sl_observation_status.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"sl-observation-status","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"updatedAt","format":"auto"},"dimensionsSpec":{"dimensions":["status","entity_externalId","entity_id","entity_type","solution_id","solution_externalId","submission_id","entity_name","solution_name","role_title","school_name","school_code","school_externalId","state_name","state_code","state_externalId","district_name","district_code","district_externalId","block_name","block_code","block_externalId","cluster_name","cluster_code","cluster_externalId","completedDate","channel","parent_channel","program_id","program_externalId","program_name","app_name","user_id","private_program","solution_type","organisation_name","ecm_marked_na","board_name","updatedAt","organisation_id","user_type","observed_school_name","observed_school_id","observed_school_code","observed_state_name","observed_state_id","observed_state_code","observed_district_name","observed_district_id","observed_district_code","observed_block_name","observed_block_id","observed_block_code","observed_cluster_name","observed_cluster_id","observed_cluster_code","isRubricDriven","criteriaLevelReport"]},"metricsSpec":[]}}}'
 ml_analytics_druid_project_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type": "azure","uris": ["azure://telemetry-data-store/projects/sl_projects.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"sl-project","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"project_updated_date","format":"auto"},"dimensionsSpec":{"dimensions":[]},"metricsSpec":[]}}}'
-ml_analytics_azure_sas_token: "{{ cloud_private_storage_secret }}"
 ml_analytics_druid_distinctCnt_obs_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/observation/distinctCount/ml_observation_distinctCount_status.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-obs-status","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"solution_name"},{"type":"string","name":"solution_id"},{"type":"string","name":"status"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"district_name"},{"type":"string","name":"district_externalId"},{"type":"string","name":"block_name"},{"type":"string","name":"block_externalId"},{"type":"string","name":"organisation_name"},{"type":"string","name":"organisation_id"},{"type":"string","name":"parent_channel"},{"type":"string","name":"solution_type"},{"type":"string","name":"private_program"},{"type":"long","name":"unique_submissions"},{"type":"long","name":"unique_entities"},{"type":"long","name":"unique_users"},{"type":"long","name":"unique_solution"},{"type":"string","name":"time_stamp"}]},"metricsSpec":[]}}}'
 ml_analytics_druid_distinctCnt_obs_domain_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/observation/distinctCount_domain/ml_observation_distinctCount_domain.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-obs-domain","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"solution_name"},{"type":"string","name":"solution_id"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"district_name"},{"type":"string","name":"district_externalId"},{"type":"string","name":"block_name"},{"type":"string","name":"block_externalId"},{"type":"string","name":"organisation_name"},{"type":"string","name":"organisation_id"},{"type":"string","name":"parent_channel"},{"type":"string","name":"solution_type"},{"type":"string","name":"private_program"},{"type":"string","name":"domain_name"},{"type":"string","name":"domain_externalId"},{"type":"string","name":"domain_level"},{"type":"long","name":"unique_submissions"},{"type":"long","name":"unique_entities"},{"type":"long","name":"unique_users"},{"type":"long","name":"unique_solution"},{"type":"string","name":"time_stamp"}]},"metricsSpec":[]}}}'
 ml_analytics_druid_distinctCnt_obs_domain_criteria_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/observation/distinctCount_domain_criteria/ml_observation_distinctCount_domain_criteria.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-obs-domain-criteria","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"solution_name"},{"type":"string","name":"solution_id"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"district_name"},{"type":"string","name":"district_externalId"},{"type":"string","name":"block_name"},{"type":"string","name":"block_externalId"},{"type":"string","name":"organisation_name"},{"type":"string","name":"organisation_id"},{"type":"string","name":"parent_channel"},{"type":"string","name":"solution_type"},{"type":"string","name":"private_program"},{"type":"string","name":"domain_name"},{"type":"string","name":"domain_externalId"},{"type":"string","name":"domain_level"},{"type":"string","name":"criteria_name"},{"type":"string","name":"criteria_score"},{"type":"string","name":"criteria_id"},{"type":"long","name":"unique_submissions"},{"type":"long","name":"unique_entities"},{"type":"long","name":"unique_users"},{"type":"long","name":"unique_solution"},{"type":"string","name":"time_stamp"}]},"metricsSpec":[]}}}'
-ml_analytics_druid_distinctCnt_projects_status_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/projects/distinctCount/ml_projects_distinctCount.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-project-status","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"project_title"},{"type":"string","name":"solution_id"},{"type":"string","name":"status_of_project"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"district_name"},{"type":"string","name":"district_externalId"},{"type":"string","name":"block_name"},{"type":"string","name":"block_externalId"},{"type":"string","name":"organisation_name"},{"type":"string","name":"organisation_id"},{"type":"string","name":"private_program"},{"type":"string","name":"project_created_type"},{"type":"string","name":"parent_channel"},{"type":"long","name":"unique_projects"},{"type":"long","name":"unique_users"},{"type":"long","name":"unique_solution"},{"type":"long","name":"no_of_imp_with_evidence"},{"type":"string","name":"time_stamp"}]},"metricsSpec":[]}}}'
-ml_analytics_obs_distinctCnt_azure_blob_path: "observation/distinctCount/"
-ml_analytics_obs_distinctCnt_domain_azure_blob_path: "observation/distinctCount_domain/"
-ml_analytics_obs_distinctCnt_domain_criteria_azure_blob_path: "observation/distinctCount_domain_criteria/"
-ml_analytics_projects_distinctCnt_azure_blob_path: "projects/distinctCount/"
+ml_analytics_druid_distinctCnt_projects_status_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/projects/distinctCount/ml_projects_distinctCount.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-project-status","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"project_title"},{"type":"string","name":"solution_id"},{"type":"string","name":"status_of_project"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"district_name"},{"type":"string","name":"district_externalId"},{"type":"string","name":"block_name"},{"type":"string","name":"block_externalId"},{"type":"string","name":"organisation_name"},{"type":"string","name":"organisation_id"},{"type":"string","name":"private_program"},{"type":"string","name":"project_created_type"},{"type":"string","name":"parent_channel"},{"type":"long","name":"unique_projects"},{"type":"long","name":"unique_users"},{"type":"long","name":"unique_solution"},{"type":"long","name":"no_of_imp_with_evidence"},{"type":"string","name":"time_stamp"},{"type":"long","name":"no_of_certificate_issued"}]},"metricsSpec":[]}}}'
+ml_analytics_obs_distinctCnt_cloud_blob_path: "observation/distinctCount/"
+ml_analytics_obs_distinctCnt_domain_cloud_blob_path: "observation/distinctCount_domain/"
+ml_analytics_obs_distinctCnt_domain_criteria_cloud_blob_path: "observation/distinctCount_domain_criteria/"
+ml_analytics_projects_distinctCnt_cloud_blob_path: "projects/distinctCount/"
 ml_analytics_obs_distinctCnt_output_dir: "{{ WORKDIR }}/source/observations/distinctCount/output"
 ml_analytics_obs_distinctCnt_domain_output_dir: "{{ WORKDIR }}/source/observations/distinctCount_domain/output"
 ml_analytics_obs_distinctCnt_domain_criteria_output_dir: "{{ WORKDIR }}/source/observations/distinctCount_domain_criteria/output"
 ml_analytics_projects_distinctCnt_output_dir: "{{ WORKDIR }}/source/projects/distinctCount/output"
-ml_analytics_survey_rollup_azure_blob_path: "survey/rollup/"
+ml_analytics_survey_rollup_cloud_blob_path: "survey/rollup/"
 ml_analytics_druid_survey_rollup_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/survey/rollup/sl_survey_rollup.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-survey-status-rollup","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":true,"segmentGranularity":"DAY"},"timestampSpec":{"column":"updatedAt","format":"iso"},"dimensionsSpec":{"dimensions":["survey_submission_id", "submission_status", "user_id", "user_sub_type", "user_type", "state_externalId", "block_externalId", "district_externalId", "cluster_externalId", "school_externalId", "state_name", "block_name", "district_name", "cluster_name", "school_name", "board_name", "organisation_id", "organisation_name", "program_externalId", "program_id", "program_name", "survey_name", "survey_id", "survey_externalId", "created_date", "submission_date", "updatedAt", "parent_channel",{"type":"long","name":"status_code"}, "solution_name", "solution_id"]},"metricsSpec":[{"name":"count","type":"count"},{"name":"sum___v","type":"longSum","fieldName":"__v"},{"name":"sum_status_code","type":"longMax","fieldName":"status_code"},{"type":"HLLSketchBuild","name":"count_of_user_id","fieldName":"user_id"},{"type":"HLLSketchBuild","name":"count_of_survey_submission_id","fieldName":"survey_submission_id"},{"type":"HLLSketchBuild","name":"count_of_solution_id","fieldName":"solution_id"},{"type":"HLLSketchBuild","name":"count_of_program_id","fieldName":"program_id"}]}}}'
 ml_analytics_survey_rollup_output_dir: "/opt/sparkjobs/source/survey/output"
-ml_analytics_project_rollup_azure_blob_path: "projects/rollup"
-ml_analytics_observation_rollup_azure_blob_path: "observation/rollup"
+ml_analytics_project_rollup_cloud_blob_path: "projects/rollup"
+ml_analytics_observation_rollup_cloud_blob_path: "observation/rollup"
 ml_analytics_project_rollup_output_dir: "/opt/sparkjobs/source/projects/output_rollup"
 ml_analytics_observation_status_rollup_output_dir: "/opt/sparkjobs/source/observations/output_rollup"
 ml_analytics_druid_project_rollup_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/projects/rollup/projects_rollup.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-project-status-rollup","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"project_updated_date","format":"iso"},"dimensionsSpec":{"dimensions":["project_title","project_goal","area_of_improvement","status_of_project","tasks_name","tasks_status","designation","task_evidence_status","project_id","task_id","project_created_type","parent_channel","program_id","program_name","project_updated_date","createdBy","program_externalId","private_program","task_deleted_flag","project_terms_and_condition","state_externalId","block_externalId","district_externalId","cluster_externalId","school_externalId","state_name","block_name","district_name","cluster_name","school_name","board_name","organisation_name","solution_id","organisation_id",{"name":"status_code","type":"long"}]},"metricsSpec":[{"name":"count","type":"count"},{"name":"sum___v","type":"longSum","fieldName":"__v"},{"name":"sum_status_code","type":"longMax","fieldName":"status_code"},{"type":"HLLSketchBuild","name":"count_of_createBy","fieldName":"createdBy"},{"type":"HLLSketchBuild","name":"count_of_project_id","fieldName":"project_id"},{"type":"HLLSketchBuild","name":"count_of_solution_id","fieldName":"solution_id"},{"type":"HLLSketchBuild","name":"count_of_program_id","fieldName":"program_id"}]}}}'
 ml_analytics_druid_observation_status_rollup_injestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/observation/rollup/observation_status_rollup.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-observation-status-rollup","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"updatedAt","format":"iso"},"dimensionsSpec":{"dimensions":["status","user_id","solution_id","submission_id","entity_name","completedDate","program_id","private_program","solution_type","updatedAt","role_title","solution_name","program_name","channel","parent_channel","block_name","district_name","school_name","cluster_name","state_name","organisation_name","board_name","district_externalId","state_externalId","block_externalId","cluster_externalId","school_externalId","organisation_id",{"type":"long","name":"status_code"}]},"metricsSpec":[{"type":"count","name":"count"},{"type":"longSum","name":"sum___v","fieldName":"__v","expression":null},{"type":"HLLSketchBuild","name":"count_distinct_solution","fieldName":"solution_id","lgK":12,"tgtHllType":"HLL_4","round":false},{"type":"HLLSketchBuild","name":"count_distinct_submission_id","fieldName":"submission_id","lgK":12,"tgtHllType":"HLL_4","round":false},{"type":"HLLSketchBuild","name":"count_distinct_user_id","fieldName":"user_id","lgK":12,"tgtHllType":"HLL_4","round":false}]}}}'
 ml_analytics_druid_rollup_url: "{{groups['druid'][0]}}:8081"
-ml_analytics_AWS_access_key: "{{ cloud_private_storage_accountname }}"
-ml_analytics_AWS_secret_access_key: "{{ cloud_private_storage_secret }}"
-ml_analytics_AWS_region_name: "{{ cloud_private_storage_region }}"
-ml_analytics_AWS_bucket_name: "{{ cloud_storage_telemetry_bucketname }}"
-ml_analytics_druid_distinctCnt_prglevel_projects_status_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/projects/distinctCountPrglevel/ml_projects_distinctCount_prgmlevel.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-project-programLevel-status","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"status_of_project"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"private_program"},{"type":"string","name":"project_created_type"},{"type":"string","name":"parent_channel"},{"type":"long","name":"unique_projects"},{"type":"long","name":"unique_users"},{"type":"long","name":"no_of_imp_with_evidence"},{"type":"string","name":"time_stamp"}]},"metricsSpec":[]}}}'
+ml_analytics_druid_distinctCnt_prglevel_projects_status_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/projects/distinctCountPrglevel/ml_projects_distinctCount_prgmlevel.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-project-programLevel-status","granularitySpec":{"type":"uniform","queryGranularity":"none","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"time_stamp","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"program_name"},{"type":"string","name":"program_id"},{"type":"string","name":"status_of_project"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"private_program"},{"type":"string","name":"project_created_type"},{"type":"string","name":"parent_channel"},{"type":"long","name":"unique_projects"},{"type":"long","name":"unique_users"},{"type":"long","name":"no_of_imp_with_evidence"},{"type":"string","name":"time_stamp"},{"type":"long","name":"no_of_certificate_issued"}]},"metricsSpec":[]}}}'
 ml_analytics_projects_distinctCnt_prglevel_output_dir: "{{ WORKDIR }}/source/projects/distinctCountPrglevel/output"
-ml_analytics_projects_distinctCnt_prglevel_azure_blob_path: "projects/distinctCountPrglevel/"
+ml_analytics_projects_distinctCnt_prglevel_cloud_blob_path: "projects/distinctCountPrglevel/"
 ml_analytics_survey_status_output_dir : "{{ WORKDIR }}/source/survey/status/output"
-ml_analytics_survey_azure_blob_path : "survey/status/"
-ml_analytics_druid_survey_status_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/survey/status/sl_survey_status.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-survey-status","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":true,"segmentGranularity":"DAY"},"timestampSpec":{"column":"updatedAt","format":"iso"},"dimensionsSpec":{"dimensions":["survey_submission_id", "submission_status", "user_id", "user_sub_type", "user_type", "state_externalId", "block_externalId", "district_externalId", "cluster_externalId", "school_externalId", "state_name", "block_name", "district_name", "cluster_name", "school_name", "board_name", "organisation_id", "organisation_name", "program_externalId", "program_id", "program_name", "survey_name", "survey_id", "survey_externalId", "created_date", "submission_date", "updatedAt", "parent_channel", "solution_name", "solution_id","private_program"]},"metricsSpec":[]}}}'
-ml_analytics_slack_token: "{{ ml_slack_token | default('') }}"
-ml_analytics_channel_name: "{{ ml_slack_channel | default('') }}"
-ml_analytics_program_dashboard_azure_blob_path: "{{ ml_program_blob_path | default('') }}"
+ml_analytics_survey_cloud_blob_path : "survey/status/"
+ml_analytics_druid_survey_status_injestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris":["azure://telemetry-data-store/survey/status/sl_survey_status.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"ml-survey-status","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":true,"segmentGranularity":"DAY"},"timestampSpec":{"column":"updatedAt","format":"iso"},"dimensionsSpec":{"dimensions":["survey_submission_id", "submission_status", "user_id", "user_sub_type", "user_type", "state_externalId", "block_externalId", "district_externalId", "cluster_externalId", "school_externalId", "state_name", "block_name", "district_name", "cluster_name", "school_name", "board_name", "organisation_id", "organisation_name", "program_externalId", "program_id", "program_name", "survey_name", "survey_id", "survey_externalId", "created_date", "submission_date", "updatedAt", "parent_channel", "solution_name", "solution_id","private_program","state_code","school_code","district_code","block_code","cluster_code"]},"metricsSpec":[]}}}'
+ml_slack_token: "{{ ml_analytics_slack_token | default('') }}"
+ml_slack_channel: "{{ ml_analytics_slack_channel | default('') }}"
+ml_analytics_program_dashboard_cloud_blob_path: "{{ ml_program_blob_path | default('') }}"
 ml_druid_query_data: "{{ ml_druid_query | default('') }}"
 ml_program_dashboard_data: "{{ ml_program_data | default('') }}"
 ml_analytics_druid_query_url: "{{groups['druid'][0]}}:8082"
 ml_analytics_druid_observation_query_spec: '{"queryType":"scan","dataSource":"sl-observation","resultFormat":"list","columns":["completedDate","createdAt","createdBy","criteriaExternalId","criteriaId","criteriaName","entityType","entityTypeId","observationId","observationName","observationSubmissionId","questionAnswer","questionECM","questionExternalId","questionId","questionName","questionResponseLabel","questionResponseType","solutionExternalId","solutionId","solutionName","updatedAt","instanceParentId","instanceId","instanceParentResponsetype","instanceParentQuestion","questionSequenceByEcm","maxScore","minScore","percentageScore","pointsBasedScoreInParent","totalScore","scoreAchieved","totalpercentage","instanceParentExternalId","instanceParentEcmSequence","remarks","total_evidences","evidence_count","school","block","district","cluster","state","schoolName","blockName","districtName","clusterName","stateName","schoolExternalId","blockExternalId","districtExternalId","clusterExternalId","stateExternalId","schoolTypes","administrationTypes","instanceParentCriteriaId","instanceParentCriteriaExternalId","instanceParentCriteriaName","role_title","location_validated_with_geotag","distance_in_meters","entity","entityExternalId","entityName","isAPrivateProgram","programId","programName","programExternalId","questionResponseLabel_number","criteriaLevel","criteriaScore","submissionNumber","submissionTitle","channel","parent_channel","user_districtName","user_blockName","user_clusterName","appName","evidences","user_stateName","domainName","domainExternalId","childName","childType","childExternalid","level","criteriaDescription","programDescription","solutionDescription","label","imp_project_id","imp_project_title","imp_project_goal","imp_project_externalId","ancestorName","scoringSystem","domainLevel","domainScore","criteriaLevelReport","user_schoolName","user_schoolId","user_schoolUDISE_code","solution_type","organisation_name","user_boardName","district_externalId","state_externalId","block_externalId","cluster_externalId","organisation_id","user_type"],"intervals":["1901-01-01T00:00:00+00:00/2101-01-01T00:00:00+00:00"]}'
 ml_analytics_druid_observation_batch_ingestion_spec: '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type": "azure","uris": ["azure://telemetry-data-store/observation/batchDeletion/druidData.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"sl-observation","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"completedDate","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"completedDate"},{"type":"string","name":"createdAt"},{"type":"string","name":"createdBy"},{"type":"string","name":"criteriaExternalId"},{"type":"string","name":"criteriaId"},{"type":"string","name":"criteriaName"},{"type":"string","name":"entityType"},{"type":"string","name":"entityTypeId"},{"type":"string","name":"observationId"},{"type":"string","name":"observationName"},{"type":"string","name":"observationSubmissionId"},{"type":"string","name":"questionAnswer"},{"type":"string","name":"questionECM"},{"type":"string","name":"questionExternalId"},{"type":"string","name":"questionId"},{"type":"string","name":"questionName"},{"type":"string","name":"questionResponseLabel"},{"type":"string","name":"questionResponseType"},{"type":"string","name":"solutionExternalId"},{"type":"string","name":"solutionId"},{"type":"string","name":"solutionName"},{"type":"string","name":"updatedAt"},{"type":"string","name":"instanceParentId"},{"type":"string","name":"instanceId"},{"type":"string","name":"instanceParentResponsetype"},{"type":"string","name":"instanceParentQuestion"},{"type":"string","name":"questionSequenceByEcm"},{"type":"string","name":"maxScore"},{"type":"string","name":"minScore"},{"type":"string","name":"percentageScore"},{"type":"string","name":"pointsBasedScoreInParent"},{"type":"string","name":"totalScore"},{"type":"string","name":"scoreAchieved"},{"type":"string","name":"totalpercentage"},{"type":"string","name":"instanceParentExternalId"},{"type":"string","name":"instanceParentEcmSequence"},{"type":"string","name":"remarks"},{"type":"string","name":"total_evidences"},{"type":"string","name":"evidence_count"},{"type":"string","name":"school"},{"type":"string","name":"block"},{"type":"string","name":"district"},{"type":"string","name":"cluster"},{"type":"string","name":"state"},{"type":"string","name":"schoolName"},{"type":"string","name":"blockName"},{"type":"string","name":"districtName"},{"type":"string","name":"clusterName"},{"type":"string","name":"stateName"},{"type":"string","name":"schoolExternalId"},{"type":"string","name":"blockExternalId"},{"type":"string","name":"districtExternalId"},{"type":"string","name":"clusterExternalId"},{"type":"string","name":"stateExternalId"},{"type":"string","name":"schoolTypes"},{"type":"string","name":"administrationTypes"},{"type":"string","name":"instanceParentCriteriaId"},{"type":"string","name":"instanceParentCriteriaExternalId"},{"type":"string","name":"instanceParentCriteriaName"},{"type":"string","name":"role_title"},{"type":"string","name":"location_validated_with_geotag"},{"type":"string","name":"distance_in_meters"},{"type":"string","name":"entity"},{"type":"string","name":"entityExternalId"},{"type":"string","name":"entityName"},{"type":"string","name":"isAPrivateProgram"},{"type":"string","name":"programId"},{"type":"string","name":"programName"},{"type":"string","name":"programExternalId"},{"name":"questionResponseLabel_number","type":"float"},{"type":"string","name":"criteriaLevel"},{"type":"string","name":"criteriaScore"},{"type":"string","name":"submissionNumber"},{"type":"string","name":"submissionTitle"},{"type":"string","name":"channel"},{"type":"string","name":"parent_channel"},{"type":"string","name":"user_districtName"},{"type":"string","name":"user_blockName"},{"type":"string","name":"user_clusterName"},{"type":"string","name":"appName"},{"type":"string","name":"evidences"},{"type":"string","name":"user_stateName"},{"type":"string","name":"domainName"},{"type":"string","name":"domainExternalId"},{"type":"string","name":"childName"},{"type":"string","name":"childType"},{"type":"string","name":"childExternalid"},{"type":"string","name":"level"},{"type":"string","name":"criteriaDescription"},{"type":"string","name":"programDescription"},{"type":"string","name":"solutionDescription"},{"type":"string","name":"label"},{"type":"string","name":"imp_project_id"},{"type":"string","name":"imp_project_title"},{"type":"string","name":"imp_project_goal"},{"type":"string","name":"imp_project_externalId"},{"type":"string","name":"ancestorName"},{"type":"string","name":"scoringSystem"},{"type":"string","name":"domainLevel"},{"type":"string","name":"domainScore"},{"name":"criteriaLevelReport","type":"boolean"},{"type":"string","name":"user_schoolName"},{"type":"string","name":"user_schoolId"},{"type":"string","name":"user_schoolUDISE_code"},{"type":"string","name":"solution_type"},{"type":"string","name":"organisation_name"},{"type":"string","name":"user_boardName"},{"type":"string","name":"district_externalId"},{"type":"string","name":"state_externalId"},{"type":"string","name":"block_externalId"},{"type":"string","name":"cluster_externalId"},{"type":"string","name":"organisation_id"},{"type":"string","name":"user_type"},{"type":"string","name":"isSubmissionDeleted"}]},"metricsSpec":[{"type":"floatSum","name":"question_response_number","fieldName":"questionResponseLabel_number"}]}}}'
-ml_analytics_observation_batchupdate_azure_blob_path: "observation/batchDeletion"
+ml_analytics_observation_batchupdate_cloud_blob_path: "observation/batchDeletion"
 ml_analytics_observation_submission_id_filepath: "{{ WORKDIR }}/ml-analytics-service/observations/submissions.csv"
 ml_analytics_observation_batchupdate_output_dir: "{{ WORKDIR }}/source/observations/"
 ml_analytics_druid_survey_query_spec : '{"queryType":"scan","dataSource":"sl-survey","resultFormat":"list","columns":["completedDate","createdAt","createdBy","criteriaExternalId","criteriaId","criteriaName","surveyId","surveyName","surveySubmissionId","questionAnswer","questionECM","questionExternalId","questionId","questionName","questionResponseLabel","questionResponseType","solutionExternalId","solutionId","solutionName","updatedAt","instanceParentId","instanceId","instanceParentResponsetype","instanceParentQuestion","questionSequenceByEcm","maxScore","minScore","percentageScore","pointsBasedScoreInParent","totalScore","scoreAchieved","totalpercentage","instanceParentExternalId","instanceParentEcmSequence","remarks","total_evidences","evidence_count","instanceParentCriteriaId","instanceParentCriteriaExternalId","instanceParentCriteriaName","isAPrivateProgram","programId","programName","programExternalId","questionResponseLabel_number","channel","parent_channel","appName","organisation_name","user_subtype","user_type","board_name","district_code","district_name","district_externalId","block_code","block_name","block_externalId","school_code","school_name","school_externalId","cluster_code","cluster_name","cluster_externalId","state_code","state_name","state_externalId","organisation_id","evidences"],"intervals":["1901-01-01T00:00:00+00:00/2101-01-01T00:00:00+00:00"]}'
 ml_analytics_druid_survey_batch_ingestion_spec : '{"type":"index","spec":{"ioConfig":{"type":"index","inputSource":{"type":"azure","uris": ["azure://telemetry-data-store/survey/batchDeletion/druidData.json"]},"inputFormat":{"type":"json"}},"tuningConfig":{"type":"index","partitionsSpec":{"type":"dynamic"}},"dataSchema":{"dataSource":"sl-survey","granularitySpec":{"type":"uniform","queryGranularity":"DAY","rollup":false,"segmentGranularity":"DAY"},"timestampSpec":{"column":"completedDate","format":"auto"},"dimensionsSpec":{"dimensions":[{"type":"string","name":"completedDate"},{"type":"string","name":"createdAt"},{"type":"string","name":"createdBy"},{"type":"string","name":"criteriaExternalId"},{"type":"string","name":"criteriaId"},{"type":"string","name":"criteriaName"},{"type":"string","name":"surveyId"},{"type":"string","name":"surveyName"},{"type":"string","name":"surveySubmissionId"},{"type":"string","name":"questionAnswer"},{"type":"string","name":"questionECM"},{"type":"string","name":"questionExternalId"},{"type":"string","name":"questionId"},{"type":"string","name":"questionName"},{"type":"string","name":"questionResponseLabel"},{"type":"string","name":"questionResponseType"},{"type":"string","name":"solutionExternalId"},{"type":"string","name":"solutionId"},{"type":"string","name":"solutionName"},{"type":"string","name":"updatedAt"},{"type":"string","name":"instanceParentId"},{"type":"string","name":"instanceId"},{"type":"string","name":"instanceParentResponsetype"},{"type":"string","name":"instanceParentQuestion"},{"type":"string","name":"questionSequenceByEcm"},{"type":"string","name":"maxScore"},{"type":"string","name":"minScore"},{"type":"string","name":"percentageScore"},{"type":"string","name":"pointsBasedScoreInParent"},{"type":"string","name":"totalScore"},{"type":"string","name":"scoreAchieved"},{"type":"string","name":"totalpercentage"},{"type":"string","name":"instanceParentExternalId"},{"type":"string","name":"instanceParentEcmSequence"},{"type":"string","name":"remarks"},{"type":"string","name":"total_evidences"},{"type":"string","name":"evidence_count"},{"type":"string","name":"evidences"},{"type":"string","name":"instanceParentCriteriaId"},{"type":"string","name":"instanceParentCriteriaExternalId"},{"type":"string","name":"instanceParentCriteriaName"},{"type":"string","name":"isAPrivateProgram"},{"type":"string","name":"programId"},{"type":"string","name":"programName"},{"type":"string","name":"programExternalId"},{"name":"questionResponseLabel_number","type":"float"},{"type":"string","name":"channel"},{"type":"string","name":"parent_channel"},{"type":"string","name":"appName"},{"type":"string","name":"organisation_name"},{"type":"string","name":"user_subtype"},{"type":"string","name":"user_type"},{"type":"string","name":"board_name"},{"type":"string","name":"district_code"},{"type":"string","name":"district_name"},{"type":"string","name":"district_externalId"},{"type":"string","name":"block_code"},{"type":"string","name":"block_name"},{"type":"string","name":"block_externalId"},{"type":"string","name":"school_code"},{"type":"string","name":"school_name"},{"type":"string","name":"school_externalId"},{"type":"string","name":"cluster_code"},{"type":"string","name":"cluster_name"},{"type":"string","name":"cluster_externalId"},{"type":"string","name":"state_code"},{"type":"string","name":"state_name"},{"type":"string","name":"state_externalId"},{"type":"string","name":"organisation_id"},{"type":"string","name":"isSubmissionDeleted"}]},"metricsSpec":[{"type":"floatSum","name":"question_response_number","fieldName":"questionResponseLabel_number"}]}}}'
-ml_analytics_survey_batchupdate_azure_blob_path : "survey/batchDeletion"
+ml_analytics_survey_batchupdate_cloud_blob_path : "survey/batchDeletion"
 ml_analytics_survey_submission_id_filepath : "{{ WORKDIR }}/ml-analytics-service/survey/submissions.csv"
 ml_analytics_survey_batchupdate_output_dir : "{{ WORKDIR }}/source/survey/"
 ml_analytics_druid_interval_list: '["1901-01-01T00:00:00+00:00/2020-01-01T00:00:00+00:00","2020-01-01T00:00:00+00:00/2020-06-01T00:00:00+00:00","2020-06-01T00:00:00+00:00/2021-01-01T00:00:00+00:00","2021-01-01T00:00:00+00:00/2021-06-01T00:00:00+00:00","2021-06-01T00:00:00+00:00/2022-01-01T00:00:00+00:00","2022-01-01T00:00:00+00:00/2022-03-01T00:00:00+00:00","2022-03-01T00:00:00+00:00/2022-06-01T00:00:00+00:00","2022-06-01T00:00:00+00:00/2022-09-01T00:00:00+00:00","2022-09-01T00:00:00+00:00/2023-01-01T00:00:00+00:00"]'
-ml_analytics_azure_account_key: "{{ cloud_private_storage_secret }}"
 ML_Cloud_Service_Provider: "{{ cloud_service_provider | default('azure') }}" ## Valid options - ORACLE, gcloud, aws & azure
 ml_analytics_cloud_package_path: "{{ config_path }}/cloud_storage"
-ml_analytics_cname_url: "https://{{ cloud_private_storage_accountname }}.blob.core.windows.net/{{ cloud_storage_telemetry_bucketname }}"
-ml_GCP_secret_json_file: "gcp_secrets.json"
-ml_GCP_bucket_name: "{{ cloud_storage_telemetry_bucketname }}"
-ml_GCP_Secrets:
-  account_name: "{{ cloud_private_storage_accountname }}"
-  account_key: "{{ cloud_private_storage_secret }}"
+ml_analytics_cname_url: "{{ cloud_storage_url }}/{{ cloud_storage_samiksha_bucketname }}"
+ml_Cloud_secret_json_file: "cloud_secrets.json"
+ml_Cloud_Secrets:
+  account_name: "{{ cloud_public_storage_accountname }}"
+  account_key: "{{ cloud_public_storage_secret }}"
+cloud_public_storage_accountname: "{{ cloud_public_storage_accountname }}"
+cloud_storage_telemetry_bucketname: "{{ cloud_storage_telemetry_bucketname }}"
+cloud_public_storage_secret: "{{ cloud_public_storage_secret }}"
+cloud_public_storage_region: "{{ cloud_public_storage_region }}"
+cloud_public_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
+ml_analytics_project_program : "{{ WORKDIR }}/ml-analytics-service/projects/program_ids.txt"
+ml_analytics_projects_program_filename: "{{ config_path }}/projects/program_ids.txt"
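
The block above assumes the common `cloud_*` variables are already defined at the environment level (group_vars or the private inventory). A minimal sketch of those definitions, with illustrative values only; the `vault_*` name is a hypothetical placeholder:

```yaml
# group_vars/all.yml (illustrative values; variable names follow the new convention)
cloud_service_provider: azure                      # or aws / gcloud / oracle
cloud_public_storage_accountname: sunbirdpublic    # hypothetical account / access key
cloud_public_storage_secret: "{{ vault_public_storage_secret }}"
cloud_public_storage_region: ap-south-1            # consumed by the aws / oracle branches
cloud_public_storage_endpoint: ""                  # consumed by the oracle branch
cloud_storage_telemetry_bucketname: telemetry-data-store
cloud_storage_samiksha_bucketname: samiksha        # hypothetical bucket
cloud_storage_url: "https://sunbirdpublic.blob.core.windows.net"
```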
diff --git a/ansible/roles/ml-analytics-service/tasks/main.yml b/ansible/roles/ml-analytics-service/tasks/main.yml
index dfa015c99cb9ca8146990c61386dd63553b57815..ee609b8806cc716c3afb84649faf483d4b51ff15 100755
--- a/ansible/roles/ml-analytics-service/tasks/main.yml
+++ b/ansible/roles/ml-analytics-service/tasks/main.yml
@@ -106,7 +106,7 @@
     backup: yes
 
 - name: Copy GCP Secrets to JSON file
-  copy: dest="{{config_path}}/{{ml_GCP_secret_json_file}}" content="{{ ml_GCP_Secrets | to_nice_json}}" mode=0400 owner="{{ USER }}"
+  copy: dest="{{config_path}}/{{ml_Cloud_secret_json_file}}" content="{{ ml_Cloud_Secrets | to_nice_json}}" mode=0400 owner="{{ USER }}"
   when: ML_Cloud_Service_Provider == 'gcloud'
 
 - name: Templating the shell_script_config.j2 to shell_script_config
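
The renamed task above keeps the inline `copy:` key=value form; a behavior-equivalent block-YAML sketch of the same task, for readability:

```yaml
- name: Copy GCP Secrets to JSON file
  ansible.builtin.copy:
    dest: "{{ config_path }}/{{ ml_Cloud_secret_json_file }}"
    content: "{{ ml_Cloud_Secrets | to_nice_json }}"
    mode: "0400"
    owner: "{{ USER }}"
  when: ML_Cloud_Service_Provider == 'gcloud'
```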
diff --git a/ansible/roles/ml-analytics-service/templates/config.j2 b/ansible/roles/ml-analytics-service/templates/config.j2
index 770de394cb87fc77b8fa940bf075fbb53f6f079b..52927ec957c83068efe07d8429a3753decf8d219 100644
--- a/ansible/roles/ml-analytics-service/templates/config.j2
+++ b/ansible/roles/ml-analytics-service/templates/config.j2
@@ -154,50 +154,49 @@ survey_streaming_error = {{ ml_analytics_survey_log_folder_path }}/error.log
 
 [ORACLE]
 
-endpoint_url = {{ ml_ORACLE_endpoint_url }}
+endpoint_url = {{ cloud_public_storage_endpoint }}
 
-access_key = {{ ml_ORACLE_access_key }}
+access_key = {{ cloud_public_storage_accountname }}
 
-secret_access_key = {{ ml_ORACLE_secret_access_key }}
+secret_access_key = {{ cloud_public_storage_secret }}
 
-region_name = {{ ml_ORACLE_region_name }}
+region_name = {{ cloud_public_storage_region }}
 
-bucket_name = {{ ml_ORACLE_bucket_name }}
+bucket_name = {{ cloud_storage_telemetry_bucketname }}
 
 {% elif ML_Cloud_Service_Provider is eq 'gcloud' %}
 
 [GCP]
 
-secret_data = {{ ml_GCP_secret_json_file }}
+secret_data = {{ ml_Cloud_secret_json_file }}
 
-bucket_name = {{ ml_GCP_bucket_name }}
+bucket_name = {{ cloud_storage_telemetry_bucketname }}
 
 {% elif ML_Cloud_Service_Provider is eq 'aws' %}
 
 [AWS]
 
-service_name = {{ ml_AWS_service_name }}
+service_name = S3
 
-access_key = {{ ml_AWS_access_key }}
+access_key = {{ cloud_public_storage_accountname }}
 
-secret_access_key = {{ ml_AWS_secret_access_key }}
+secret_access_key = {{ cloud_public_storage_secret }}
 
-region_name = {{ ml_AWS_region_name }}
+region_name = {{ cloud_public_storage_region }}
 
-bucket_name = {{ ml_AWS_bucket_name }}
+bucket_name = {{ cloud_storage_telemetry_bucketname }}
 
 {% else %}
 
 [AZURE]
 
-account_name = {{ ml_analytics_azure_account_name }}
+account_name = {{ cloud_public_storage_accountname }}
 
-sas_token = {{ ml_analytics_azure_sas_token }}
+container_name = {{ cloud_storage_telemetry_bucketname }}
 
-container_name = {{ ml_analytics_azure_container_name }}
-
-account_key = {{ ml_analytics_azure_account_key }}
+account_key = {{ cloud_public_storage_secret }}
 
+{% endif %}
 
 [OUTPUT_DIR]
 
@@ -231,6 +230,7 @@ survey_sub_ids = {{ml_analytics_survey_submission_id_filepath}}
 
 survey_druid_data = {{ml_analytics_survey_batchupdate_output_dir}}
 
+program_text_file = {{ml_analytics_project_program}}
 
 [SLACK]
 
@@ -250,32 +250,32 @@ program_dashboard_data = {{ ml_program_dashboard_data }}
 
 cloud_module_path = {{ ml_analytics_cloud_package_path }}
 
-observation_blob_path = {{ ml_analytics_observation_azure_blob_path }}
+observation_blob_path = {{ ml_analytics_observation_cloud_blob_path }}
 
-projects_blob_path = {{ ml_analytics_project_azure_blob_path }}
+projects_blob_path = {{ ml_analytics_project_cloud_blob_path }}
 
-observation_distinctCount_blob_path = {{ ml_analytics_obs_distinctCnt_azure_blob_path }}
+observation_distinctCount_blob_path = {{ ml_analytics_obs_distinctCnt_cloud_blob_path }}
 
-observation_distinctCount_domain_blob_path = {{ ml_analytics_obs_distinctCnt_domain_azure_blob_path }}
+observation_distinctCount_domain_blob_path = {{ ml_analytics_obs_distinctCnt_domain_cloud_blob_path }}
 
-observation_distinctCount_domain_criteria_blob_path = {{ ml_analytics_obs_distinctCnt_domain_criteria_azure_blob_path }}
+observation_distinctCount_domain_criteria_blob_path = {{ ml_analytics_obs_distinctCnt_domain_criteria_cloud_blob_path }}
 
-projects_distinctCnt_blob_path = {{ ml_analytics_projects_distinctCnt_azure_blob_path }}
+projects_distinctCnt_blob_path = {{ ml_analytics_projects_distinctCnt_cloud_blob_path }}
 
-projects_distinctCnt_prgmlevel_blob_path = {{ ml_analytics_projects_distinctCnt_prglevel_azure_blob_path }}
+projects_distinctCnt_prgmlevel_blob_path = {{ ml_analytics_projects_distinctCnt_prglevel_cloud_blob_path }}
 
-projects_rollup_blob_path = {{ ml_analytics_project_rollup_azure_blob_path }}
+projects_rollup_blob_path = {{ ml_analytics_project_rollup_cloud_blob_path }}
 
-observation_rollup_blob_path = {{ ml_analytics_observation_rollup_azure_blob_path }}
+observation_rollup_blob_path = {{ ml_analytics_observation_rollup_cloud_blob_path }}
 
-survey_rollup_blob_path = {{ ml_analytics_survey_rollup_azure_blob_path }}
+survey_rollup_blob_path = {{ ml_analytics_survey_rollup_cloud_blob_path }}
 
-survey_blob_path = {{ ml_analytics_survey_azure_blob_path }}
+survey_blob_path = {{ ml_analytics_survey_cloud_blob_path }}
 
-projects_program_csv = {{ ml_analytics_program_dashboard_azure_blob_path }}
+projects_program_csv = {{ ml_analytics_program_dashboard_cloud_blob_path }}
 
-observation_batch_ingestion_data_del = {{ ml_analytics_observation_batchupdate_azure_blob_path }}
+observation_batch_ingestion_data_del = {{ ml_analytics_observation_batchupdate_cloud_blob_path }}
 
-survey_batch_ingestion_data_del = {{ ml_analytics_survey_batchupdate_azure_blob_path}}
+survey_batch_ingestion_data_del = {{ ml_analytics_survey_batchupdate_cloud_blob_path}}
 
 cname_url = {{ ml_analytics_cname_url }}
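
Because config.j2 now reads the common variables directly, a guard task ahead of the templating step could fail fast in environments that have not yet migrated. A minimal sketch, assuming it is added to the role's tasks (not part of this change):

```yaml
- name: Assert common cloud storage variables are defined (illustrative)
  ansible.builtin.assert:
    that:
      - cloud_public_storage_accountname is defined
      - cloud_public_storage_secret is defined
      - cloud_storage_telemetry_bucketname is defined
    fail_msg: "Define the common cloud_* variables before templating config.j2"
```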
diff --git a/ansible/roles/ml-analytics-service/templates/shell_script_config.j2 b/ansible/roles/ml-analytics-service/templates/shell_script_config.j2
index 6ecdeba31a3cd73c4d8d6a8e561a81ccf7bd9b1b..97e35a4db03657d872da072039d5893d793084dc 100644
--- a/ansible/roles/ml-analytics-service/templates/shell_script_config.j2
+++ b/ansible/roles/ml-analytics-service/templates/shell_script_config.j2
@@ -1,2 +1 @@
-mongo_url={{ ml_analytics_mongodb_url }}
-mongo_db_name={{ ml_analytics_mongo_db_name }}
+projects_program_filename={{ ml_analytics_projects_program_filename }}
diff --git a/ansible/roles/mongodb-backup/defaults/main.yml b/ansible/roles/mongodb-backup/defaults/main.yml
index da5a0f710fbb1e40510973d01fc1793dcc170db2..547137f0ca4e6291de2d42f518e09e12af8e0c16 100644
--- a/ansible/roles/mongodb-backup/defaults/main.yml
+++ b/ansible/roles/mongodb-backup/defaults/main.yml
@@ -1,9 +1,4 @@
 mongo_backup_dir: '/tmp/mongo-backup'
-mongo_backup_azure_container_name: "mongodb-backup"
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-mongo_backup_storage: "{{ mongo_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_mongodbbackup_bucketname: "{{cloud_storage_management_bucketname}}"
+cloud_storage_mongodbbackup_foldername: 'mongodb-backup'
diff --git a/ansible/roles/mongodb-backup/tasks/main.yml b/ansible/roles/mongodb-backup/tasks/main.yml
index 0762f2754f48ff76fd55802cdba850533807cb79..f51216b14fb959176ed1ba19425ae7fc2a11d427 100644
--- a/ansible/roles/mongodb-backup/tasks/main.yml
+++ b/ansible/roles/mongodb-backup/tasks/main.yml
@@ -19,12 +19,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ mongo_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_mongodbbackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ mongo_backup_file_name }}.tar.gz"
     local_file_or_folder_path: "{{ mongo_backup_file_path }}.tar.gz"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -32,10 +32,10 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_mongodbbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ mongo_backup_file_path }}.tar.gz"
     s3_path: "{{ mongo_backup_storage }}/{{ mongo_backup_file_name }}.tar.gz"
   when: cloud_service_provider == "aws"
@@ -45,9 +45,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ mongo_backup_storage }}"
-    dest_file_name: "{{ mongo_backup_file_name }}.tar.gz"
+    gcp_bucket_name: "{{ cloud_storage_mongodbbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_mongodbbackup_foldername }}/{{ mongo_backup_file_name }}.tar.gz"
     local_file_or_folder_path: "{{ mongo_backup_file_path }}.tar.gz"
   when: cloud_service_provider == "gcloud"
  
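Note the split above: on azure, `cloud_storage_mongodbbackup_foldername` doubles as the blob container, while on aws/gcloud the archive goes into `cloud_storage_mongodbbackup_bucketname` with that folder as a key prefix. An illustrative play applying the role (host group, bucket, and vault names hypothetical):

```yaml
- hosts: mongo_master
  vars:
    cloud_service_provider: aws
    cloud_storage_management_bucketname: mgmt-backups   # hypothetical bucket
    cloud_management_storage_accountname: "{{ vault_mgmt_access_key }}"
    cloud_management_storage_secret: "{{ vault_mgmt_secret_key }}"
  roles:
    - mongodb-backup
```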
diff --git a/ansible/roles/postgres-managed-service-backup/defaults/main.yml b/ansible/roles/postgres-managed-service-backup/defaults/main.yml
index 6e637bf3cee262397c88cc35d3cb9aacc96cfcff..ed62efd66be2900805b6f4b76168a5b529eec5b2 100644
--- a/ansible/roles/postgres-managed-service-backup/defaults/main.yml
+++ b/ansible/roles/postgres-managed-service-backup/defaults/main.yml
@@ -1,7 +1,5 @@
 postgresql_user: postgres
 postgresql_backup_dir: /tmp/postgres
-postgresql_backup_azure_container_name: postgresql-backup
-
 db_name:
   db: ['keycloak', 'api_manager_{{ postgres_env }}', 'quartz']
 
@@ -9,9 +7,5 @@ postgres_admin_user: "{{sunbird_pg_user}}"
 postgres_hostname: "{{groups['postgresql-master-1'][0]}}"
 postgres_password: "{{postgres_password}}"
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-postgresql_backup_storage: "{{ postgresql_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_postgresqlbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_postgresqlbackup_foldername: postgresql-backup
diff --git a/ansible/roles/postgres-managed-service-backup/tasks/main.yml b/ansible/roles/postgres-managed-service-backup/tasks/main.yml
index ea206146b343da5bb319a3628ae5daf014dbb0f2..ba101e2509d5f60d316ae66d251fd0884ba85aa0 100644
--- a/ansible/roles/postgres-managed-service-backup/tasks/main.yml
+++ b/ansible/roles/postgres-managed-service-backup/tasks/main.yml
@@ -46,12 +46,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ postgresql_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_postgresqlbackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ postgresql_backup_gzip_file_name }}.zip"
     local_file_or_folder_path: "{{ postgresql_backup_gzip_file_path }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -59,12 +59,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ postgresql_backup_gzip_file_path }}"
-    s3_path: "{{ postgresql_backup_storage }}/{{ postgresql_backup_gzip_file_name }}.zip"
+    s3_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgresql_backup_gzip_file_name }}.zip"
   when: cloud_service_provider == "aws"
    
 - name: upload file to gcloud storage
@@ -72,9 +72,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ postgresql_backup_storage }}"
-    dest_file_name: "{{ postgresql_backup_gzip_file_name }}.zip"
+    gcp_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgresql_backup_gzip_file_name }}.zip"
     local_file_or_folder_path: "{{ postgresql_backup_gzip_file_path }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/postgres-managed-service-restore/defaults/main.yml b/ansible/roles/postgres-managed-service-restore/defaults/main.yml
index 4ac0d621510f71672b23532f28fc2b033e1683e5..88934250007d9e1fc8cc4a0ff54faaf21d55ffc6 100644
--- a/ansible/roles/postgres-managed-service-restore/defaults/main.yml
+++ b/ansible/roles/postgres-managed-service-restore/defaults/main.yml
@@ -1,6 +1,4 @@
 postgresql_restore_dir: /tmp/postgres-restore
-postgres_backup_azure_container_name: postgresql-backup
-
 db:
   name: ['keycloak', 'api_manager_{{ postgres_env }}', 'quartz']
   role: ['keycloak', 'api_manager_{{ postgres_env }}', 'quartz']
@@ -13,9 +11,5 @@ postgres_password:
 postgres_hostname:
 postgres_env:
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-postgres_backup_storage: "{{ postgres_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_postgresqlbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_postgresqlbackup_foldername: postgresql-backup
diff --git a/ansible/roles/postgres-managed-service-restore/tasks/main.yml b/ansible/roles/postgres-managed-service-restore/tasks/main.yml
index 0299ff3f735ac90762f020e4039aaff1bef87085..58d2c53482ced745416c5f99c3ad3d97972d40fa 100644
--- a/ansible/roles/postgres-managed-service-restore/tasks/main.yml
+++ b/ansible/roles/postgres-managed-service-restore/tasks/main.yml
@@ -14,11 +14,11 @@
     name: azure-cloud-storage
     tasks_from: blob-download.yml
   vars:
-    blob_container_name: "{{ postgres_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_postgresqlbackup_foldername }}"
     blob_file_name: "{{ postgres_backup_filename }}"
     local_file_or_folder_path: "{{ postgres_restore_dir }}/{{ postgres_backup_filepath }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: download a file from aws s3
@@ -26,12 +26,12 @@
     name: aws-cloud-storage
     tasks_from: download.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ postgres_restore_dir }}/{{ postgres_backup_filepath }}"
-    s3_path: "{{ postgres_backup_storage }}/{{ postgres_backup_filename }}"
+    s3_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgres_backup_filename }}"
   when: cloud_service_provider == "aws"
    
 - name: download file from gcloud storage
@@ -39,9 +39,8 @@
     name: gcp-cloud-storage
     tasks_from: download.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ postgres_backup_storage }}"
-    dest_file_name: "{{ postgres_backup_filename }}"
+    gcp_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgres_backup_filename }}"
     local_file_or_folder_path: "{{ postgres_restore_dir }}/{{ postgres_backup_filepath }}"
   when: cloud_service_provider == "gcloud"
 
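All three providers now resolve the archive to the same `postgresql-backup/<file>` key, so a restore run only needs the backup file name; an illustrative extra-vars sketch (file names hypothetical):

```yaml
cloud_service_provider: gcloud
postgres_backup_filename: postgresql_backup_2023-01-01.zip
postgres_backup_filepath: postgresql_backup_2023-01-01.zip
```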
diff --git a/ansible/roles/postgresql-backup/defaults/main.yml b/ansible/roles/postgresql-backup/defaults/main.yml
index 0b6a9bca4a8f7e3f04ab3565a8672cacb0a08e7d..341b1c23edb86166088f78b6002fa3e4a722036c 100644
--- a/ansible/roles/postgresql-backup/defaults/main.yml
+++ b/ansible/roles/postgresql-backup/defaults/main.yml
@@ -1,10 +1,5 @@
 postgresql_backup_dir: /tmp/postgresql-backup
 postgresql_user: postgres
-postgresql_backup_azure_container_name: postgresql-backup
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-postgresql_backup_storage: "{{ postgresql_backup_azure_container_name }}"
+cloud_storage_postgresqlbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_postgresqlbackup_foldername: postgresql-backup
diff --git a/ansible/roles/postgresql-backup/tasks/main.yml b/ansible/roles/postgresql-backup/tasks/main.yml
index 65116bede0e8c17457dcbaf7ddf5d49712fbf009..5b3303bf975e449568a9edfd6cdc041cf93e9180 100644
--- a/ansible/roles/postgresql-backup/tasks/main.yml
+++ b/ansible/roles/postgresql-backup/tasks/main.yml
@@ -18,12 +18,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ postgresql_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_postgresqlbackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ postgresql_backup_gzip_file_name }}"
     local_file_or_folder_path: "{{ postgresql_backup_gzip_file_path }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -31,12 +31,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ postgresql_backup_gzip_file_path }}"
-    s3_path: "{{ postgresql_backup_storage }}/{{ postgresql_backup_gzip_file_name }}"
+    s3_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgresql_backup_gzip_file_name }}"
   when: cloud_service_provider == "aws"
      
 - name: upload file to gcloud storage
@@ -44,11 +44,10 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ postgresql_backup_storage }}"
-    dest_file_name: "{{ postgresql_backup_gzip_file_name }}"
+    gcp_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgresql_backup_gzip_file_name }}"
     local_file_or_folder_path: "{{ postgresql_backup_gzip_file_path }}"
   when: cloud_service_provider == "gcloud"
 
 - name: clean up backup dir after upload
-  file: path="{{ postgresql_backup_dir }}" state=absent
\ No newline at end of file
+  file: path="{{ postgresql_backup_dir }}" state=absent
diff --git a/ansible/roles/postgresql-restore/defaults/main.yml b/ansible/roles/postgresql-restore/defaults/main.yml
index 5f0708ed347edbd887bd1f7a70027ee8c275295c..feeed7d6bbaaebd672854e1af81c0a22841315c2 100644
--- a/ansible/roles/postgresql-restore/defaults/main.yml
+++ b/ansible/roles/postgresql-restore/defaults/main.yml
@@ -3,11 +3,6 @@ postgresql_user: postgres
 postgresql_port: 5432
 postgresql_cluster_version: 9.5
 postgresql_cluster_name: main
-postgresql_restore_azure_container_name: postgresql-backup
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-postgresql_restore_storage: "{{ postgresql_restore_azure_container_name }}"
\ No newline at end of file
+cloud_storage_postgresqlbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_postgresqlbackup_foldername: postgresql-backup
diff --git a/ansible/roles/postgresql-restore/tasks/main.yml b/ansible/roles/postgresql-restore/tasks/main.yml
index 877e1789876e4b32f03d097a07612a23db81d58b..e076590f23ca99e1613caafead1478b37d098788 100644
--- a/ansible/roles/postgresql-restore/tasks/main.yml
+++ b/ansible/roles/postgresql-restore/tasks/main.yml
@@ -9,11 +9,11 @@
     name: azure-cloud-storage
     tasks_from: blob-download.yml
   vars:
-    blob_container_name: "{{ postgresql_restore_storage }}"
+    blob_container_name: "{{ cloud_storage_postgresqlbackup_foldername }}"
     blob_file_name: "{{ postgresql_restore_gzip_file_name }}"
     local_file_or_folder_path: "{{ postgresql_restore_gzip_file_path  }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: download a file from aws s3
@@ -21,12 +21,12 @@
     name: aws-cloud-storage
     tasks_from: download.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ postgresql_restore_gzip_file_path  }}"
-    s3_path: "{{ postgres_backup_storage }}/{{ postgresql_restore_gzip_file_name }}"
+    s3_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgresql_restore_gzip_file_name }}"
   when: cloud_service_provider == "aws"
   
 - name: download file from gcloud storage
@@ -34,9 +34,8 @@
     name: gcp-cloud-storage
     tasks_from: download.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ postgresql_restore_storage }}"
-    dest_file_name: "{{ postgresql_restore_gzip_file_name }}"
+    gcp_bucket_name: "{{ cloud_storage_postgresqlbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_postgresqlbackup_foldername }}/{{ postgresql_restore_gzip_file_name }}"
     local_file_or_folder_path: "{{ postgresql_restore_gzip_file_path }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/prometheus-backup-v2/defaults/main.yml b/ansible/roles/prometheus-backup-v2/defaults/main.yml
index e3752a693f48287d29fc1bdcaaed2bb606d63bfb..919dcd82d92a08dc36b7e0d042f004943bdd43a3 100644
--- a/ansible/roles/prometheus-backup-v2/defaults/main.yml
+++ b/ansible/roles/prometheus-backup-v2/defaults/main.yml
@@ -1,10 +1,5 @@
 ---
 # defaults file for ansible/roles/prometheus-backup-v2
-prometheus_backup_azure_container_name: prometheus-backup
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-prometheus_backup_storage: "{{ prometheus_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_prometheusbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_prometheusbackup_foldername: prometheus-backup
diff --git a/ansible/roles/prometheus-backup-v2/tasks/main.yml b/ansible/roles/prometheus-backup-v2/tasks/main.yml
index 3831080dbc26ecbdf6c398abdef9aabe571b021c..4a65bb6f8fe58927281b8e442948d8bbca416398 100644
--- a/ansible/roles/prometheus-backup-v2/tasks/main.yml
+++ b/ansible/roles/prometheus-backup-v2/tasks/main.yml
@@ -25,8 +25,8 @@
     container_public_access: "off"
     blob_file_name: "{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
     local_file_or_folder_path: "/tmp/{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -34,12 +34,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_prometheusbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "/tmp/{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
-    s3_path: "{{ prometheus_backup_storage }}/{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
+    s3_path: "{{ cloud_storage_prometheusbackup_foldername }}/{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
   when: cloud_service_provider == "aws"
   
 - name: upload file to gcloud storage
@@ -47,9 +47,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ prometheus_backup_storage }}"
-    dest_file_name: "{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
+    gcp_bucket_name: "{{ cloud_storage_prometheusbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_prometheusbackup_foldername }}/{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
     local_file_or_folder_path: "/tmp/{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz"
   when: cloud_service_provider == "gcloud"
 
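With the defaults above, the same snapshot archive lands in a provider-appropriate location; an illustrative summary, where `<file>` stands for `{{ prometheus_backup_prefix }}_{{ snapshot_name }}.tar.gz`:

```yaml
azure:  "blob container 'prometheus-backup' / <file>"
aws:    "s3://<cloud_storage_prometheusbackup_bucketname>/prometheus-backup/<file>"
gcloud: "gs://<cloud_storage_prometheusbackup_bucketname>/prometheus-backup/<file>"
```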
diff --git a/ansible/roles/prometheus-backup/defaults/main.yml b/ansible/roles/prometheus-backup/defaults/main.yml
index 17425092ee607b4e46f18784cfc91dbcb49460ef..e5a4ecdcb33ad08f4a40f9e19ef59a777970cc2f 100644
--- a/ansible/roles/prometheus-backup/defaults/main.yml
+++ b/ansible/roles/prometheus-backup/defaults/main.yml
@@ -1,14 +1,8 @@
 prometheus_backup_dir: /tmp/prometheus-backup
-prometheus_backup_azure_container_name: prometheus-backup
-
-# Set these vars per environment as show in example below
+# Set these vars per environment as shown in the example below
 # Override these values in group_vars
 backup_storage_name: backups
 backup_storage_key: '<backup_storage_key>'
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-prometheus_backup_storage: "{{ prometheus_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_prometheusbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_prometheusbackup_foldername: prometheus-backup
diff --git a/ansible/roles/prometheus-backup/tasks/main.yml b/ansible/roles/prometheus-backup/tasks/main.yml
index 55a51287ae3ef1a6cbb7aa0498b58a3143b3f430..10d8e2fb3b2728976fc91bf5285581c93036a458 100644
--- a/ansible/roles/prometheus-backup/tasks/main.yml
+++ b/ansible/roles/prometheus-backup/tasks/main.yml
@@ -30,12 +30,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ prometheus_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_prometheusbackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ prometheus_backup_gzip_file_name }}"
     local_file_or_folder_path: "{{ prometheus_backup_gzip_file_path }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -43,12 +43,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_prometheusbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ prometheus_backup_gzip_file_path }}"
-    s3_path: "{{ prometheus_backup_storage }}/{{ prometheus_backup_gzip_file_name }}"
+    s3_path: "{{ cloud_storage_prometheusbackup_foldername }}/{{ prometheus_backup_gzip_file_name }}"
   when: cloud_service_provider == "aws" 
   
 - name: upload file to gcloud storage
@@ -56,9 +56,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ prometheus_backup_storage }}"
-    dest_file_name: "{{ prometheus_backup_gzip_file_name }}"
+    gcp_bucket_name: "{{ cloud_storage_prometheusbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_prometheusbackup_foldername }}/{{ prometheus_backup_gzip_file_name }}"
     local_file_or_folder_path: "{{ prometheus_backup_gzip_file_path }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/prometheus-restore/defaults/main.yml b/ansible/roles/prometheus-restore/defaults/main.yml
index bee405457a6c88b121bf2605a6b3c3e174182ee5..f5f1511216273255f4764cb78040730408e6dad4 100644
--- a/ansible/roles/prometheus-restore/defaults/main.yml
+++ b/ansible/roles/prometheus-restore/defaults/main.yml
@@ -1,9 +1,3 @@
 prometheus_backup_dir: /tmp/prometheus-backup
-prometheus_backup_azure_container_name: prometheus-backup
-
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-prometheus_backup_storage: "{{ prometheus_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_prometheusbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_prometheusbackup_foldername: prometheus-backup
diff --git a/ansible/roles/prometheus-restore/tasks/main.yml b/ansible/roles/prometheus-restore/tasks/main.yml
index 2232770fdd70f0e93231cf961a710868487e13e7..440b777fe47e60b29e8540b5b85ea627f586dc0e 100644
--- a/ansible/roles/prometheus-restore/tasks/main.yml
+++ b/ansible/roles/prometheus-restore/tasks/main.yml
@@ -7,11 +7,11 @@
     name: azure-cloud-storage
     tasks_from: blob-download.yml
   vars:
-    blob_container_name: "{{ prometheus_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_prometheusbackup_foldername }}"
     blob_file_name: "{{ prometheus_backup_filename }}"
     local_file_or_folder_path: "{{ prometheus_backup_dir }}/{{ prometheus_backup_filepath }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: download a file from aws s3
@@ -19,12 +19,12 @@
     name: aws-cloud-storage
     tasks_from: download.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_prometheusbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ prometheus_backup_dir }}/{{ prometheus_backup_filepath }}"
-    s3_path: "{{ prometheus_backup_storage }}/{{ prometheus_backup_filename }}"
+    s3_path: "{{ cloud_storage_prometheusbackup_foldername }}/{{ prometheus_backup_filename }}"
   when: cloud_service_provider == "aws"
 
 - name: download file from gcloud storage
@@ -32,9 +32,8 @@
     name: gcp-cloud-storage
     tasks_from: download.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ prometheus_backup_storage }}"
-    dest_file_name: "{{ prometheus_backup_filename }}"
+    gcp_bucket_name: "{{ cloud_storage_prometheusbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_prometheusbackup_foldername }}/{{ prometheus_backup_filename }}"
     local_file_or_folder_path: "{{ prometheus_backup_dir }}/{{ prometheus_backup_filepath }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/redis-backup/defaults/main.yml b/ansible/roles/redis-backup/defaults/main.yml
index 9f6055682a195cbd798c41be3417c7a55e08b7d4..54b7c60a8953fcd2fe269106071e776d2ea06404 100644
--- a/ansible/roles/redis-backup/defaults/main.yml
+++ b/ansible/roles/redis-backup/defaults/main.yml
@@ -1,10 +1,5 @@
 redis_backup_dir: /tmp/redis-backup
-nodebb_redis_backup_azure_container_name: nodebb-redis-backup
 learner_user: learning
 
-# This variable is added for the below reason -
-# 1. Introduce a common variable for various clouds. In case of azure, it refers to container name, in case of aws / gcp, it refers to folder name
-# 2. We want to avoid too many new variable introduction / replacement in first phase. Hence we will reuse the existing variable defined in private repo
-#    or other default files and just assign the value to the newly introduced common variable 
-# 3. After few releases, we will remove the older variables and use only the new variables across the repos
-nodebb_redis_backup_storage: "{{ nodebb_redis_backup_azure_container_name }}"
\ No newline at end of file
+cloud_storage_redisbackup_bucketname: "{{ cloud_storage_management_bucketname }}"
+cloud_storage_redisbackup_foldername: nodebb-redis-backup
diff --git a/ansible/roles/redis-backup/tasks/main.yml b/ansible/roles/redis-backup/tasks/main.yml
index 5359a362c81bf93bd27cbfdfca2cc62d3c29b2f1..f1cf35622f6620ea729aaeaed59e58fa1457a738 100644
--- a/ansible/roles/redis-backup/tasks/main.yml
+++ b/ansible/roles/redis-backup/tasks/main.yml
@@ -18,12 +18,12 @@
     name: azure-cloud-storage
     tasks_from: blob-upload.yml
   vars:
-    blob_container_name: "{{ nodebb_redis_backup_storage }}"
+    blob_container_name: "{{ cloud_storage_redisbackup_foldername }}"
     container_public_access: "off"
     blob_file_name: "{{ redis_backup_file_name }}"
     local_file_or_folder_path: "{{ redis_backup_file_path }}"
-    storage_account_name: "{{ azure_management_storage_account_name }}"
-    storage_account_key: "{{ azure_management_storage_account_key }}"
+    storage_account_name: "{{ cloud_management_storage_accountname }}"
+    storage_account_key: "{{ cloud_management_storage_secret }}"
   when: cloud_service_provider == "azure"
 
 - name: upload file to aws s3
@@ -31,12 +31,12 @@
     name: aws-cloud-storage
     tasks_from: upload.yml
   vars:
-    s3_bucket_name: "{{ aws_management_s3_bucket_name }}"
-    aws_access_key_id: "{{ aws_management_bucket_access_key }}"
-    aws_secret_access_key: "{{ aws_management_bucket_secret_access_key }}"
-    aws_default_region: "{{ aws_region }}"
+    s3_bucket_name: "{{ cloud_storage_redisbackup_bucketname }}"
+    aws_access_key_id: "{{ cloud_management_storage_accountname }}"
+    aws_secret_access_key: "{{ cloud_management_storage_secret }}"
+    aws_default_region: "{{ cloud_public_storage_region }}"
     local_file_or_folder_path: "{{ redis_backup_file_path }}"
-    s3_path: "{{ nodebb_redis_backup_storage }}/{{ redis_backup_file_name }}"
+    s3_path: "{{ cloud_storage_redisbackup_foldername }}/{{ redis_backup_file_name }}"
   when: cloud_service_provider == "aws"
    
 - name: upload file to gcloud storage
@@ -44,9 +44,8 @@
     name: gcp-cloud-storage
     tasks_from: upload.yml
   vars:
-    gcp_bucket_name: "{{ gcloud_management_bucket_name }}"
-    dest_folder_name: "{{ nodebb_redis_backup_storage }}"
-    dest_file_name: "{{ redis_backup_file_name }}"
+    gcp_bucket_name: "{{ cloud_storage_redisbackup_bucketname }}"
+    gcp_path: "{{ cloud_storage_redisbackup_foldername }}/{{ redis_backup_file_name }}"
     local_file_or_folder_path: "{{ redis_backup_file_path }}"
   when: cloud_service_provider == "gcloud"
 
diff --git a/ansible/roles/stack-sunbird/defaults/main.yml b/ansible/roles/stack-sunbird/defaults/main.yml
index 8b600f04a1c0edd082e0dd84113c5e5b8ccbe0fd..036fda51bdef3c86e7ef0be8195d512c42ccb3c5 100644
--- a/ansible/roles/stack-sunbird/defaults/main.yml
+++ b/ansible/roles/stack-sunbird/defaults/main.yml
@@ -1048,3 +1048,18 @@ kong_desktop_device_consumer_names_for_opa: '["desktop"]'
 # Audience claim check is disabled as of now
 # List of keycloak clients as these can come in audience field of a JWT token
 # keycloak_allowed_aud: '"{{ keycloak_auth_server_url }}/realms/{{ keycloak_realm }}", "account", "realm-management"'
+
+
+cloudstorage_relative_path_prefix_content: "CONTENT_STORAGE_BASE_PATH"
+cloudstorage_relative_path_prefix_dial: "DIAL_STORAGE_BASE_PATH"
+cloudstorage_metadata_list: '["appIcon", "artifactUrl", "posterImage", "previewUrl", "thumbnail", "assetsMap", "certTemplate", "itemSetPreviewUrl", "grayScaleAppIcon", "sourceURL", "variants", "downloadUrl", "streamingUrl", "toc_url", "data", "question", "solutions", "editorState", "media", "pdfUrl"]'
+
+### inQuiry assessment service default values
+inquiry_schema_path: "{{ kp_schema_base_path }}"
+inquiry_kafka_urls: "{{ kafka_urls }}"
+inquiry_assessment_import_kafka_topic_name: "{{ env_name }}.object.import.request"
+inquiry_assessment_publish_kafka_topic_name: "{{ env_name }}.assessment.publish.request"
+inquiry_cassandra_connection: "{{ lp_cassandra_connection }}"
+inquiry_cassandra_keyspace_prefix: "{{ lp_cassandra_keyspace_prefix }}"
+inquiry_redis_host: "{{ sunbird_lp_redis_host }}"
+inquiry_search_service_base_url: "{{ sunbird_search_service_api_base_url }}/v3/search"
\ No newline at end of file
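
The new `inquiry_*` defaults above simply alias the existing knowledge-platform variables, so the inQuiry assessment service can later be repointed without touching the shared `kp_*`/`lp_*` values. An illustrative group_vars override (host names hypothetical):

```yaml
inquiry_cassandra_connection: "inquiry-cassandra:9042"
inquiry_kafka_urls: "inquiry-kafka:9092"
inquiry_redis_host: "inquiry-redis"
```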
diff --git a/ansible/roles/stack-sunbird/templates/assessment-service_application.conf b/ansible/roles/stack-sunbird/templates/assessment-service_application.conf
index 60d129907ad51315e2a6041f71e7be6f6c53ad9a..c06a44f2d803f123808c9db6efa972a10a0b28ba 100644
--- a/ansible/roles/stack-sunbird/templates/assessment-service_application.conf
+++ b/ansible/roles/stack-sunbird/templates/assessment-service_application.conf
@@ -336,14 +336,13 @@ play.filters {
 
 play.http.parser.maxMemoryBuffer = 50MB
 akka.http.parsing.max-content-length = 50MB
-schema.base_path="{{ kp_schema_base_path | default('/home/sunbird/assessment-service-1.0-SNAPSHOT/schemas')}}"
+schema.base_path="{{ inquiry_schema_path | default('/home/sunbird/assessment-service-1.0-SNAPSHOT/schemas')}}"
 
 # Cassandra Configuration
-cassandra.lp.connection="{{ lp_cassandra_connection }}"
-content.keyspace = "{{ lp_cassandra_keyspace_prefix }}_content_store"
+cassandra.lp.connection="{{ inquiry_cassandra_connection }}"
 
 # Redis Configuration
-redis.host="{{ sunbird_lp_redis_host }}"
+redis.host="{{ inquiry_redis_host }}"
 redis.port=6379
 redis.maxConnections=128
 
@@ -383,30 +382,24 @@ languageCode {
   telugu : "te"
 }
 
-cloud_storage_type: "{{ cloud_service_provider }}"
-cloud_storage_key: "{{ cloud_public_storage_accountname }}"
-cloud_storage_secret: "{{ cloud_public_storage_secret }}"
-cloud_storage_endpoint: "{{ cloud_public_storage_endpoint }}"
-cloud_storage_container: "{{ cloud_storage_content_bucketname }}"
-
 kafka {
-  urls : "{{ kafka_urls }}"
+  urls : "{{ inquiry_kafka_urls }}"
   topic.send.enable : true
-  topics.instruction : "{{ env_name }}.assessment.publish.request"
+  topics.instruction : "{{ inquiry_assessment_publish_kafka_topic_name }}"
 }
-objectcategorydefinition.keyspace="{{ lp_cassandra_keyspace_prefix }}_category_store"
-question.keyspace="{{ lp_cassandra_keyspace_prefix }}_question_store"
-questionset.keyspace="{{ lp_cassandra_keyspace_prefix }}_hierarchy_store"
+objectcategorydefinition.keyspace="{{ inquiry_cassandra_keyspace_prefix }}_category_store"
+question.keyspace="{{ inquiry_cassandra_keyspace_prefix }}_question_store"
+questionset.keyspace="{{ inquiry_cassandra_keyspace_prefix }}_hierarchy_store"
 
 composite {
   search {
-    url : "{{ sunbird_search_service_api_base_url }}/v3/search"
+    url : "{{ inquiry_search_service_base_url }}"
   }
 }
 
 import {
   request_size_limit : 300
-  output_topic_name : "{{ env_name }}.object.import.request"
+  output_topic_name : "{{ inquiry_assessment_import_kafka_topic_name }}"
   required_props {
     question : ["name", "code", "mimeType", "framework", "channel"]
     questionset : ["name", "code", "mimeType", "framework", "channel"]
@@ -426,4 +419,14 @@ assessment.copy.props_to_remove=["downloadUrl", "artifactUrl", "variants",
   "LastPublishedBy", "rejectReasons", "rejectComment", "gradeLevel", "subject",
   "medium", "board", "topic", "purpose", "subtopic", "contentCredits",
   "owner", "collaborators", "creators", "contributors", "badgeAssertions", "dialcodes",
-  "concepts", "keywords", "reservedDialcodes", "dialcodeRequired", "leafNodes", "sYS_INTERNAL_LAST_UPDATED_ON", "prevStatus", "lastPublishedBy", "streamingUrl"]
\ No newline at end of file
+  "concepts", "keywords", "reservedDialcodes", "dialcodeRequired", "leafNodes", "sYS_INTERNAL_LAST_UPDATED_ON", "prevStatus", "lastPublishedBy", "streamingUrl"]
+
+cloud_storage_container: "{{ cloud_storage_content_bucketname }}"
+
+cloudstorage {
+  metadata.replace_absolute_path={{ cloudstorage_replace_absolute_path | default('false') }}
+  metadata.list={{ cloudstorage_metadata_list }}
+  relative_path_prefix="{{ cloudstorage_relative_path_prefix | default('CLOUD_STORAGE_BASE_PATH') }}"
+  read_base_path="{{ cloudstorage_base_path }}"
+  write_base_path={{ valid_cloudstorage_base_urls }}
+}
\ No newline at end of file
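
The cloudstorage block above renders `read_base_path` as a quoted string and `write_base_path` as a JSON array emitted verbatim, so the driving variables need to be shaped accordingly; a sketch with hypothetical URLs:

```yaml
cloudstorage_replace_absolute_path: "true"
cloudstorage_base_path: "https://cdn.example.org"
valid_cloudstorage_base_urls: '["https://cdn.example.org","https://sunbirdpublic.blob.core.windows.net"]'
```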
diff --git a/ansible/roles/stack-sunbird/templates/content-service_application.conf b/ansible/roles/stack-sunbird/templates/content-service_application.conf
index fb5a2e766762743b0dd820804de009f627d43a5e..cdfad751d61e86c683f80837527c2213a3522297 100644
--- a/ansible/roles/stack-sunbird/templates/content-service_application.conf
+++ b/ansible/roles/stack-sunbird/templates/content-service_application.conf
@@ -341,7 +341,7 @@ play.filters {
 
 play.http.parser.maxMemoryBuffer = 50MB
 akka.http.parsing.max-content-length = 50MB
-schema.base_path="{{kp_schema_base_path | default('/home/sunbird/content-service-1.0-SNAPSHOT/schemas')}}"
+schema.base_path="/home/sunbird/content-service-1.0-SNAPSHOT/schemas"
 
 # Cassandra Configuration
 cassandra {
@@ -382,7 +382,7 @@ content {
   }
   h5p {
     library {
-      path: "{{ h5p_library_path }}"
+      path: "{{ h5p_library_path | default('https://sunbirddev.blob.core.windows.net/sunbird-content-dev/h5p-standalone-1.3.4.zip') }}"
     }
   }
   copy {
@@ -481,6 +481,36 @@ composite {
     url : "{{ sunbird_search_service_api_base_url }}/v3/search"
   }
 }
+gcloud_client_key: "{{ cloud_public_storage_accountname }}"
+gcloud_private_secret: """-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCqtqMByEGjddwE
+0oIkQRT4KukhPn65ozDQUfgop55VblUWeJEqmeGfTXdOTTVHpwwuYR9esrMgR5WN
+8IUGSLRmap9iyb4QBUV/gCjJIpsVu6HFMadBQCFceqzTqPMK6g6dwObNtDMxH6yP
+V47L/McwiPNoug2W+zBiRQ6YZ1GvQVY5s0KTX6EgkN/u3DW6kUu6NqcgqGuCWqUo
+Ejss4HaX4D7DSbmKgOts/rKjwtDv9fgKLgbMlufwpxwWe/jygVUNvZumBARNIuVe
++RbO6OvHb26H18KgkdDzB1VkzKX+750iIIa/KGrZHJStiw0zfri0/H0KdzbClvoO
+T6cBN/zVAgMBAAECggEAPN9dJsCKr0fSiaGqqFTQrmFWufT36SgIuu/PpmeGrP3n
+t1iMov02F8DevvwG+KLXVB5rui/MccE00eUm+gOo2DBC304CyOGoU3uWj4oYdNpw
+J8m50ymT+87+m4bOC2FEjvn/EvCjpGuf84kMMS7UtDjRWtGlEOZG7XOkbbHBzdTQ
+GldzEgsqi2T8O5I31xZ1b2LJzAVODrv7TiVElhGcUB/1MkProjhkcyJx3B3cpClw
+Y8Lq2R2urTf4NxMnmh/PmUfBzLQLauSDI/MH9NN57J1M/5uWYAIY/eaf8BtqEsbr
+XLmBP1WfNchXbfXLeadaiAX45ukt0y103qd0TmJa7QKBgQDdvgTcjKMddzfU6PeB
+XO3upl2FVLA6B89J/QiEKoeg4bkM2C3pNkGvgVA3HfHfauMhhwFee8mP14HLZQvb
++0k9tL64CiznkuEfOBApkXJDsW0iAN3TwMj5zVRAVHWBRcexMt74CdySuKDOkV9G
+5feOXfdhOZM6z8LSfGs+2lYbQwKBgQDFFmj8Mtv4Pv5zsF1/UeFnuijkHO84dNYn
+flTB5Pmwb4Z5rhnJzG446cxr9f7E/+3yjd+xtBQf5ttPwvCBbZR20RO2jA5o/qij
+XaYHCjlE7yOpAfgU+p5K3JH9bTMLuPsSVaxBof7cFoqjFalVGmpR1qAj4UGHc9mT
+nV6CGCbqBwKBgQCTI+RV9XzHsLR7s5uJXAEGu56TOv81grkqf52HFjGpsB77Rvgw
+KLCtpUF1UYmOl37gYJWn/Lxjlr2qGgMkljqjl6x2s0nY4L5B2RHgg4MvKC0iwzBv
+sx2ppXaiuWi/v24jR35tWR3kvl72s8Bla3Q6JGBjQ7FO9U5yHd2Md5VrwQKBgAzy
+QOk4KgzvjmVpE2s2pLjcmK0LXYd23U5w1P57nQ9C9DFwB0+jNyZT7VK2MQsdyLKj
+MSuKKbxCvOtLYeMOoK8BYusd3iB1gfxhPXO+7y4hC1WhxHsUT2uZe5mLH8xIVW3J
+5OvWyVgJvwehd6MYfh1sHM7ekCBmsscokjm3fm7nAoGBAL5PXhD6rCaHGOo0KXEA
+0S6rzMI6qBzQvMyOVj7b0lwey6q+G2xl7Cc9IUmxVzhBe7daD6QSQ4dU91ZKaIys
+opfZWibHFcQm6I6FJI3ha73EOB2zyyl3xlBxK9fMQVN8gELdXhA8DBuMD+Qxj6Nr
+bqteFJLCyz7ATtETSb3+hP+G
+-----END PRIVATE KEY-----"""
+gcloud_storage_bucket: "{{ cloud_storage_content_bucketname }}"
 cloud_storage_type: "{{ cloud_service_provider }}"
 cloud_storage_key: "{{ cloud_public_storage_accountname }}"
 cloud_storage_secret: "{{ cloud_public_storage_secret }}"
@@ -494,6 +524,7 @@ kafka {
   urls : "{{ kafka_urls }}"
   topic.send.enable : true
   topics.instruction : "{{ env_name }}.learning.job.request"
+  publish.request.topic : "{{ env_name }}.publish.job.request"
 }
 
 # DIAL Link Config
@@ -637,3 +668,11 @@ collection {
 }
 
 plugin.media.base.url="{{ plugin_media_base_url }}"
+
+cloudstorage {
+  metadata.replace_absolute_path={{ cloudstorage_replace_absolute_path | default('false') }}
+  relative_path_prefix={{ cloudstorage_relative_path_prefix_content }}
+  metadata.list={{ cloudstorage_metadata_list }}
+  read_base_path="{{ cloudstorage_base_path }}"
+  write_base_path={{ valid_cloudstorage_base_urls }}
+}
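
The `gcloud_private_secret` value above is inlined in the template; an alternative, sketched here as an assumption rather than part of this change, is to source it from the vaulted inventory so the key never lives in the repo, with the template referencing `{{ gcloud_private_secret_value }}` instead of the literal block:

```yaml
# inventory/<env>/secrets.yml (ansible-vault), illustrative placeholder key
gcloud_private_secret_value: |
  -----BEGIN PRIVATE KEY-----
  ...redacted...
  -----END PRIVATE KEY-----
```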
diff --git a/ansible/roles/stack-sunbird/templates/dial-service_application.conf b/ansible/roles/stack-sunbird/templates/dial-service_application.conf
index 745a8b9bfe5916e8ee0f044bd535f5ff970bded5..dd7b11dbebf4c59cb12d566208f62d7555d258af 100644
--- a/ansible/roles/stack-sunbird/templates/dial-service_application.conf
+++ b/ansible/roles/stack-sunbird/templates/dial-service_application.conf
@@ -150,6 +150,10 @@ system.config.table="system_config"
 publisher.keyspace.name="{{ env_name }}_dialcode_store"
 publisher.keyspace.table="publisher"
 
+#QRCodes Configuration
+qrcodes.keyspace.name="dialcodes"
+qrcodes.keyspace.table="dialcode_batch"
+
 #DIAL Code Generator Configuration
 dialcode.strip.chars="0"
 dialcode.length=6.0 
@@ -191,3 +195,9 @@ jsonld {
     sb_schema = ["http://store.knowlg.sunbird.org/dial/specs/sb/schema.jsonld"]
 }
 
+cloudstorage {
+  metadata.replace_absolute_path="{{ cloudstorage_replace_absolute_path | default('false') }}"
+  relative_path_prefix="{{ cloudstorage_relative_path_prefix_dial | default('DIAL_STORAGE_BASE_PATH') }}"
+  read_base_path="{{ cloudstorage_base_path }}"
+}
+cloud_storage_container="{{ cloud_storage_dial_bucketname | default('dial') }}"
diff --git a/ansible/roles/stack-sunbird/templates/taxonomy-service_application.conf b/ansible/roles/stack-sunbird/templates/taxonomy-service_application.conf
index e1298a1b923a97d65f23495698ae845142a1663f..332206c50205e0d3c52698fdc3a9e68788996a02 100644
--- a/ansible/roles/stack-sunbird/templates/taxonomy-service_application.conf
+++ b/ansible/roles/stack-sunbird/templates/taxonomy-service_application.conf
@@ -398,3 +398,11 @@ objectcategorydefinition.keyspace="{{ lp_cassandra_keyspace_prefix }}_category_s
 
 # Framework master category validation Supported values are Yes/No
 master.category.validation.enabled="{{ master_category_validation_enabled | default('Yes') }}"
+
+cloudstorage {
+  metadata.replace_absolute_path={{ cloudstorage_replace_absolute_path | default('false') }}
+  relative_path_prefix={{ cloudstorage_relative_path_prefix_content }}
+  metadata.list={{ cloudstorage_metadata_list }}
+  read_base_path="{{ cloudstorage_base_path }}"
+  write_base_path={{ valid_cloudstorage_base_urls }}
+}
diff --git a/ansible/uploadFAQs.yml b/ansible/uploadFAQs.yml
index b37398b874e448772b9d18df001193eef0faa976..cf90e343d1be37cc31f7aba27bbc16666365bb4d 100644
--- a/ansible/uploadFAQs.yml
+++ b/ansible/uploadFAQs.yml
@@ -20,12 +20,12 @@
       name: azure-cloud-storage
       tasks_from: blob-upload-batch.yml
     vars:
-      blob_container_name: "{{ upload_storage }}"
+      blob_container_name: "{{ cloud_storage_public_bucketname }}"
       container_public_access: "container"
       blob_container_folder_path: ""
       local_file_or_folder_path: "{{ playbook_dir }}/../utils/{{ item }}"
-      storage_account_name: "{{ azure_public_storage_account_name }}"
-      storage_account_key: "{{ azure_public_storage_account_key }}"
+      storage_account_name: "{{ cloud_public_storage_accountname }}"
+      storage_account_key: "{{ cloud_public_storage_secret }}"
     with_items:
       - "{{ source_folder.split(',') }}"
     when: cloud_service_provider == "azure"
@@ -35,12 +35,12 @@
       name: aws-cloud-storage
       tasks_from: upload-folder.yml
     vars:
-      s3_bucket_name: "{{ aws_public_s3_bucket_name }}"
-      aws_default_region: "{{ aws_region }}"
-      aws_access_key_id: "{{ aws_public_bucket_access_key }}"
-      aws_secret_access_key: "{{ aws_public_bucket_secret_access_key }}"
+      s3_bucket_name: "{{ cloud_storage_public_bucketname }}"
+      aws_default_region: "{{ cloud_public_storage_region }}"
+      aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+      aws_secret_access_key: "{{ cloud_public_storage_secret }}"
       local_file_or_folder_path: "{{ playbook_dir }}/../utils/{{ item }}"
-      s3_path: "{{ upload_storage }}"
+      s3_path: ""
     with_items:
       - "{{ source_folder.split(',') }}"  
     when: cloud_service_provider == "aws"   
@@ -50,13 +50,59 @@
       name: gcp-cloud-storage
       tasks_from: upload-batch.yml
     vars:
-        dest_folder_name: "{{ upload_storage }}"
+        gcp_bucket_name: "{{ cloud_storage_public_bucketname }}"
         dest_folder_path: ""
         local_file_or_folder_path: "{{ playbook_dir }}/../utils/{{ item }}"
-        gcp_bucket_name: "{{ gcloud_public_bucket_name }}"
     with_items:
       - "{{ source_folder.split(',') }}"
     when: cloud_service_provider == "gcloud"
   tags:
     - upload-faqs
+
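+# Same uploads as above, but targeting the content bucket; runs under the upload-RC-schema tag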
+- hosts: localhost
+  vars_files:
+    - "{{inventory_dir}}/secrets.yml"
+  tasks:
+  - name: upload batch of files to azure storage
+    include_role:
+      name: azure-cloud-storage
+      tasks_from: blob-upload-batch.yml
+    vars:
+      blob_container_name: "{{ cloud_storage_content_bucketname }}"
+      container_public_access: "container"
+      blob_container_folder_path: ""
+      local_file_or_folder_path: "{{ playbook_dir }}/../utils/{{ item }}"
+      storage_account_name: "{{ cloud_public_storage_accountname }}"
+      storage_account_key: "{{ cloud_public_storage_secret }}"
+    with_items:
+      - "{{ source_folder.split(',') }}"
+    when: cloud_service_provider == "azure"
+
+  - name: upload batch of files to s3
+    include_role:
+      name: aws-cloud-storage
+      tasks_from: upload-folder.yml
+    vars:
+      s3_bucket_name: "{{ cloud_storage_content_bucketname }}"
+      aws_default_region: "{{ cloud_public_storage_region }}"
+      aws_access_key_id: "{{ cloud_public_storage_accountname }}"
+      aws_secret_access_key: "{{ cloud_public_storage_secret }}"
+      local_file_or_folder_path: "{{ playbook_dir }}/../utils/{{ item }}"
+      s3_path: ""
+    with_items:
+      - "{{ source_folder.split(',') }}"
+    when: cloud_service_provider == "aws"
+
+  - name: upload batch of files to gcloud storage
+    include_role:
+      name: gcp-cloud-storage
+      tasks_from: upload-batch.yml
+    vars:
+        gcp_bucket_name: "{{ cloud_storage_content_bucketname }}"
+        dest_folder_path: ""
+        local_file_or_folder_path: "{{ playbook_dir }}/../utils/{{ item }}"
+    with_items:
+      - "{{ source_folder.split(',') }}"
+    when: cloud_service_provider == "gcloud"
+  tags:
     - upload-RC-schema
diff --git a/deploy/jenkins/jenkins-plugins-setup.sh b/deploy/jenkins/jenkins-plugins-setup.sh
index ed9a8c17569af355b875e839f1eb9f36cbe0ec6e..c4256b3620432a14cab2342f39adec9ac42effce 100755
--- a/deploy/jenkins/jenkins-plugins-setup.sh
+++ b/deploy/jenkins/jenkins-plugins-setup.sh
@@ -17,13 +17,13 @@ fi
 
 echo -e "\n\e[0;32m${bold}Downloading and copying jenkins plugin directory to Jenkins ${normal}"
 if [[ ! -d /var/lib/jenkins/plugins ]]; then
-wget https://sunbirdpublic.blob.core.windows.net/installation/plugins.tar
-tar -xf plugins.tar
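+# Plugin bundle versioned to match the pinned Jenkins release (2.319.3)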
+wget https://sunbirdpublic.blob.core.windows.net/installation/plugins-2-319-3.tar
+tar -xf plugins-2-319-3.tar
 mv plugins /var/lib/jenkins/
 chown -R jenkins:jenkins /var/lib/jenkins/plugins
 else
-wget https://sunbirdpublic.blob.core.windows.net/installation/plugins.tar
-tar -xf plugins.tar
+wget https://sunbirdpublic.blob.core.windows.net/installation/plugins-2-319-3.tar
+tar -xf plugins-2-319-3.tar
 cp -rf plugins/* /var/lib/jenkins/plugins/
 chown -R jenkins:jenkins /var/lib/jenkins/plugins
 fi
diff --git a/deploy/jenkins/jenkins-server-setup.sh b/deploy/jenkins/jenkins-server-setup.sh
old mode 100755
new mode 100644
index e51471c77bac4f919e642fc93301f832718396a3..201f9ea5e3a8447d853f10093a31eea7e739c062
--- a/deploy/jenkins/jenkins-server-setup.sh
+++ b/deploy/jenkins/jenkins-server-setup.sh
@@ -15,7 +15,10 @@ echo -e "\n\e[0;32m${bold}Installating Jenkins${normal}"
 wget -q -O - https://pkg.jenkins.io/debian-stable/jenkins.io.key | apt-key add -
 apt-add-repository "deb https://pkg.jenkins.io/debian-stable binary/"
 apt-get update
-apt-get install -y jenkins=2.346.2
+
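+# Pin Jenkins to 2.319.3, matching the plugins-2-319-3.tar bundle fetched in jenkins-plugins-setup.sh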
+#apt-get install -y jenkins=2.346.2
+apt-get install -y jenkins=2.319.3
 
 echo -e "\n\e[0;32m${bold}Installating PIP${normal}"
 apt-get install -y python-pip
diff --git a/deploy/jenkins/jobs/Deploy/jobs/Vidyadaan/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml b/deploy/jenkins/jobs/Deploy/jobs/Vidyadaan/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml
index 0236cab0eb8c15e309e2016a281f060940858ffc..1363bd3fcfafac17f2382e046db0d9443f759610 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/Vidyadaan/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/Vidyadaan/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml
@@ -83,7 +83,7 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <description/>
           <choices class="java.util.Arrays$ArrayList">
             <a class="string-array">
-              <string>upload-batch</string>
+              <string>upload-csv-template</string>
             </a>
           </choices>
         </hudson.model.ChoiceParameterDefinition>
@@ -102,15 +102,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <defaultValue/>
           <trim>false</trim>
         </hudson.model.StringParameterDefinition>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>content-service</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.ChoiceParameterDefinition>
           <name>source_path</name>
           <description/>
diff --git a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadChatbotConfig/config.xml b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadChatbotConfig/config.xml
index defc3a0ddd83dfce698c3eb5ca7ddc59e5e5c66a..79d963a0e7fc745651b4079edfc290b189f62b86 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadChatbotConfig/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadChatbotConfig/config.xml
@@ -84,15 +84,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <defaultValue>master</defaultValue>
           <trim>false</trim>
         </hudson.model.StringParameterDefinition>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>chatbot</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.StringParameterDefinition>
           <name>source_path</name>
           <description/>
@@ -169,4 +160,4 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
   </definition>
   <triggers/>
   <disabled>false</disabled>
-</flow-definition>
\ No newline at end of file
+</flow-definition>
diff --git a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml
index d87aac4ee36ca0846d1c40afc48f82b40cfb388c..72d310489eefb5fa41cab452403c1ff083335266 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadCollectionHierarchyCSV/config.xml
@@ -83,7 +83,7 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <description/>
           <choices class="java.util.Arrays$ArrayList">
             <a class="string-array">
-              <string>upload-batch</string>
+              <string>upload-csv-template</string>
             </a>
           </choices>
         </hudson.model.ChoiceParameterDefinition>
@@ -102,15 +102,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <defaultValue/>
           <trim>false</trim>
         </hudson.model.StringParameterDefinition>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>sourcing</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.ChoiceParameterDefinition>
           <name>source_path</name>
           <description/>
@@ -175,4 +166,4 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
   </definition>
   <triggers/>
   <disabled>false</disabled>
-</flow-definition>
\ No newline at end of file
+</flow-definition>
diff --git a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadDiscussionUIDocs/config.xml b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadDiscussionUIDocs/config.xml
index a801645925b166dbc593fc0cc1ab1633236d7ef2..66d749e86a726bf9e2d4b4f7335554b64fb43638 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadDiscussionUIDocs/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadDiscussionUIDocs/config.xml
@@ -159,15 +159,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <choiceType>ET_FORMATTED_HTML</choiceType>
           <omitValueField>true</omitValueField>
         </org.biouno.unochoice.DynamicReferenceParameter>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>discussion-ui</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.ChoiceParameterDefinition>
           <name>source_path</name>
           <description/>
@@ -191,7 +182,7 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <description/>
           <choices class="java.util.Arrays$ArrayList">
             <a class="string-array">
-              <string>upload-batch</string>
+              <string>upload-discussion-ui</string>
             </a>
           </choices>
         </hudson.model.ChoiceParameterDefinition>
diff --git a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadFAQs/config.xml b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadFAQs/config.xml
index 85b7c81efb7d6024f2c3461956fc9780e38e0ea1..9a6fccc4b5dca5834c14c405acebc974aa58f15f 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadFAQs/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadFAQs/config.xml
@@ -71,15 +71,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <choiceType>ET_FORMATTED_HTML</choiceType>
           <omitValueField>true</omitValueField>
         </org.biouno.unochoice.DynamicReferenceParameter>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>public</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.ChoiceParameterDefinition>
           <name>tag</name>
           <description/>
diff --git a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadPortalLabel/config.xml b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadPortalLabel/config.xml
index a75d9ee220b950f6bab10eeb1b6baf511efdb504..a1b86809861a8e660fc0c5dd7a86c6f6f2192d16 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadPortalLabel/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Kubernetes/jobs/UploadPortalLabel/config.xml
@@ -71,15 +71,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <choiceType>ET_FORMATTED_HTML</choiceType>
           <omitValueField>true</omitValueField>
         </org.biouno.unochoice.DynamicReferenceParameter>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>label</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.ChoiceParameterDefinition>
           <name>destination_path</name>
           <description/>
diff --git a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Sunbird-RC/jobs/UploadRCSchema/config.xml b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Sunbird-RC/jobs/UploadRCSchema/config.xml
index ea47b8d14e2b6f2e228a44f314ef3843bd419f70..1ff2974d6d2f185d1560f037715b14588a35a176 100644
--- a/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Sunbird-RC/jobs/UploadRCSchema/config.xml
+++ b/deploy/jenkins/jobs/Deploy/jobs/dev/jobs/Sunbird-RC/jobs/UploadRCSchema/config.xml
@@ -71,15 +71,6 @@ return """&lt;b&gt;This parameter is not used&lt;/b&gt;"""</script>
           <choiceType>ET_FORMATTED_HTML</choiceType>
           <omitValueField>true</omitValueField>
         </org.biouno.unochoice.DynamicReferenceParameter>
-        <hudson.model.ChoiceParameterDefinition>
-          <name>upload_storage</name>
-          <description/>
-          <choices class="java.util.Arrays$ArrayList">
-            <a class="string-array">
-              <string>sunbird-content-dev</string>
-            </a>
-          </choices>
-        </hudson.model.ChoiceParameterDefinition>
         <hudson.model.ChoiceParameterDefinition>
           <name>tag</name>
           <description/>
diff --git a/kubernetes/helm_charts/core/analytics/values.j2 b/kubernetes/helm_charts/core/analytics/values.j2
index 354dcab3d3b5dd0b6244489e9cdab603e4f50dbc..277f84094745f4997ac018bda20f81c9935c37f3 100644
--- a/kubernetes/helm_charts/core/analytics/values.j2
+++ b/kubernetes/helm_charts/core/analytics/values.j2
@@ -8,11 +8,10 @@ env:
   javaoptions: {{analytics_java_mem_limit|default('-Xmx600m')}}
   min_heap: {{analytics_min_heap_limit|default('-Xms1g')}}
   max_heap: {{analytics_max_heap_limit|default('-Xmx2g')}}
-  azure_private_account_secret: {{ sunbird_private_storage_account_key }}
-  azure_private_account_name: {{ sunbird_private_storage_account_name }}
-  azure_public_account_secret: {{ sunbird_public_storage_account_key }}
-  azure_public_account_name: {{ sunbird_public_storage_account_name }}
-
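+  # Storage credentials resolved from the provider-agnostic cloud_* variables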
+  azure_private_account_secret: {{ cloud_private_storage_secret }}
+  azure_private_account_name: {{ cloud_private_storage_accountname }}
+  azure_public_account_secret: {{ cloud_public_storage_secret }}
+  azure_public_account_name: {{ cloud_public_storage_accountname }}
 replicaCount: {{analytics_replicacount|default(1)}}
 repository: {{analytics_repository|default('sunbird-analytics-service')}}
 image_tag: {{ image_tag }}
diff --git a/kubernetes/helm_charts/sunbird-RC/registry/schemas/TrainingCertificate.json b/kubernetes/helm_charts/sunbird-RC/registry/schemas/TrainingCertificate.json
index 5187b08e810b99d89f664da2e69150984474de12..84dc1d54293fed7073e84cc9290e94f6c53d4d6c 100644
--- a/kubernetes/helm_charts/sunbird-RC/registry/schemas/TrainingCertificate.json
+++ b/kubernetes/helm_charts/sunbird-RC/registry/schemas/TrainingCertificate.json
@@ -69,6 +69,6 @@
 		],
 		"systemFields": ["osCreatedAt", "osUpdatedAt", "osCreatedBy", "osUpdatedBy"],
 		"enableLogin": false,
-		"credentialTemplate": "https://{{ upstream_url }}/schema/credential_template.json"
+		"credentialTemplate": "{{ upstream_url }}/schema/credential_template.json"
 	}
 }
diff --git a/kubernetes/helm_charts/sunbird-RC/registry/values.j2 b/kubernetes/helm_charts/sunbird-RC/registry/values.j2
index e7e6e6f31c2659e0486185571a32a3dc7f81c72f..433edbfc923f4423e17a280bc164689ce8752c94 100644
--- a/kubernetes/helm_charts/sunbird-RC/registry/values.j2
+++ b/kubernetes/helm_charts/sunbird-RC/registry/values.j2
@@ -44,6 +44,19 @@ rccoreenv:
   connectionInfo_maxPoolSize: {{ registry_connectionInfo_maxPoolSize|default('200')}}
   auditTaskExecutor_queueCapacity: {{ registry_auditTaskExecutor_queueCapacity|default('100')}}
   taskExecutor_index_queueCapacity: {{ registry_taskExecutor_index_queueCapacity|default('100')}}
+  authentication_enabled: {{ registry_authentication_enabled|default('false')}}
+  async_enabled: {{ registry_async_enabled|default('true')}}
+  webhook_enabled: {{ registry_webhook_enabled|default('true')}}
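+  # Zookeeper/Kafka wiring; broker and zookeeper addresses are built from the processing-cluster inventory groups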
+  ZOOKEEPER_CLIENT_PORT: {{ registry_zookeeper_client_port|default('2181')}}
+  ZOOKEEPER_TICK_TIME: {{ registry_zookeeper_tick_time|default('2000')}}
+  KAFKA_BROKER_ID: {{ registry_kafka_broker_id|default('1')}}
+  KAFKA_ZOOKEEPER_CONNECT: "{{groups['processing-cluster-zookeepers']|join(':2181,')}}:2181"
+  KAFKA_ADVERTISED_LISTENERS: "{{groups['processing-cluster-kafka']|join(':9092,')}}:9092"
+  kafka_bootstrap_address: "{{groups['processing-cluster-kafka']|join(':9092,')}}:9092"
+  KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: {{ registry_listener_security_protocol_map|default('INTERNAL:PLAINTEXT,OUTSIDE:PLAINTEXT')}}
+  KAFKA_INTER_BROKER_LISTENER_NAME: {{ registry_inter_broker_listener_name|default('INTERNAL')}}
+  KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: {{ registry_offsets_topic_replication_factor|default('1')}}
+  logging.level.root: {{ registry_logging_level|default('INFO')}}
 
 {# The below should get enabled once the service has probes implemented #}
 {# {{ registry_liveness_readiness | to_nice_yaml }} #}
diff --git a/pipelines/deploy/ContentFramework/Jenkinsfile b/pipelines/deploy/ContentFramework/Jenkinsfile
index c495bce26669de42f1af43fafa06bafd3668d3db..a02c72eb695f302a9b923638f83b1d910e3e582f 100644
--- a/pipelines/deploy/ContentFramework/Jenkinsfile
+++ b/pipelines/deploy/ContentFramework/Jenkinsfile
@@ -44,6 +44,7 @@ node() {
                     sh """                                                   
                       zip -r content-editor-artifact.zip ansible/content-editor
                       cd ansible/content-editor
+                      sudo npm install -g gulp
                       npm install
                       npm install promise
                       gulp minifyJs
diff --git a/pipelines/deploy/desktop-faq/Jenkinsfile b/pipelines/deploy/desktop-faq/Jenkinsfile
index d282ec288422e9fd26322a0f1751b3175f8c2b91..1b1a8d7f0dfd1e04990bef5da435d7d8802082bc 100644
--- a/pipelines/deploy/desktop-faq/Jenkinsfile
+++ b/pipelines/deploy/desktop-faq/Jenkinsfile
@@ -25,7 +25,7 @@ node() {
                 jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
                 currentWs = sh(returnStdout: true, script: 'pwd').trim()
                 ansiblePlaybook = "${currentWs}/ansible/desktop-faq-upload.yml"
-                ansibleExtraArgs = "--extra-vars \" upload_storage=${params.upload_storage} src_file_path=${params.src_file_path} destination_path=${params.destination_path} env_name=$envDir\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
+                ansibleExtraArgs = "--extra-vars \" src_file_path=${params.src_file_path} destination_path=${params.destination_path} env_name=$envDir\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
                 values.put('currentWs', currentWs)
                 values.put('env', envDir)
                 values.put('module', module)
diff --git a/pipelines/ops/kill-spark-jobs/Jenkinsfile b/pipelines/ops/kill-spark-jobs/Jenkinsfile
new file mode 100644
index 0000000000000000000000000000000000000000..37bad74c466d2e92db8034fe61bdac90200c6120
--- /dev/null
+++ b/pipelines/ops/kill-spark-jobs/Jenkinsfile
@@ -0,0 +1,51 @@
+@Library('deploy-conf') _
+node() {
+    try {
+        String ANSI_GREEN = "\u001B[32m"
+        String ANSI_NORMAL = "\u001B[0m"
+        String ANSI_BOLD = "\u001B[1m"
+        String ANSI_RED = "\u001B[31m"
+        String ANSI_YELLOW = "\u001B[33m"
+
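+        // Reset the workspace: remove untracked files left by a previous build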
+        stage('checkout public repo') {
+            folder = new File("$WORKSPACE/.git")
+            if (folder.exists()) {
+                println "Found .git folder. Clearing it..."
+                sh 'git clean -fxd'
+            }
+            checkout scm
+        }
+
+        ansiColor('xterm') {
+            stage('deploy') {
+                values = [:]
+                currentWs = sh(returnStdout: true, script: 'pwd').trim()
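+                // JOB_NAME has the form <env>/<module>/<jobName>; components are taken from the end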
+                envDir = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-3].trim()
+                module = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-2].trim()
+                jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
+                ansiblePlaybook = "${currentWs}/ansible/kill_spark_jobs.yaml"
+                ansibleExtraArgs = "-v"
+                values.put('currentWs', currentWs)
+                values.put('env', envDir)
+                values.put('module', module)
+                values.put('jobName', jobName)
+                values.put('ansiblePlaybook', ansiblePlaybook)
+                values.put('ansibleExtraArgs', ansibleExtraArgs)
+                println values
+                ansible_playbook_run(values)
+                currentBuild.result = 'SUCCESS'
+                currentBuild.description = "Private: ${params.private_branch}, Public: ${params.branch_or_tag}"
+            }
+        }
+        summary()
+    }
+    catch (err) {
+        currentBuild.result = 'FAILURE'
+        throw err
+    }
+    finally {
+        slack_notify(currentBuild.result)
+        email_notify()
+    }
+}
diff --git a/pipelines/upload/chatbot/Jenkinsfile b/pipelines/upload/chatbot/Jenkinsfile
index c97597c44c123488eeee4f4d18b0142b7cfb7a5b..da0774f3826a7a30609d29eb7aea8d1ffae081d2 100644
--- a/pipelines/upload/chatbot/Jenkinsfile
+++ b/pipelines/upload/chatbot/Jenkinsfile
@@ -38,7 +38,7 @@ node() {
                 jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
                 currentWs = sh(returnStdout: true, script: 'pwd').trim()
                 ansiblePlaybook = "${currentWs}/ansible/desktop-faq-upload.yml"
-                ansibleExtraArgs = "--extra-vars \" upload_storage=${params.upload_storage} src_file_path=${params.source_path} destination_path=${params.destination_path}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
+                ansibleExtraArgs = "--extra-vars \" src_file_path=${params.source_path} destination_path=${params.destination_path}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
                 values.put('currentWs', currentWs)
                 values.put('env', envDir)
                 values.put('module', module)
diff --git a/pipelines/upload/discussion-UI/Jenkinsfile b/pipelines/upload/discussion-UI/Jenkinsfile
index c4d794fb3e3a0107e122f0cddd6e1b33f7579d82..067158e44590096c38a08872f13dbeb60289ecd8 100644
--- a/pipelines/upload/discussion-UI/Jenkinsfile
+++ b/pipelines/upload/discussion-UI/Jenkinsfile
@@ -30,7 +30,7 @@ node() {
                        unzip ${artifact}
                     """
                     ansiblePlaybook = "${currentWs}/ansible/desktop-faq-upload.yml"
-                    ansibleExtraArgs = "--extra-vars \" upload_storage=${params.upload_storage} src_file_path=${params.source_path} destination_path=${params.destination_path}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
+                    ansibleExtraArgs = "--extra-vars \" src_file_path=${params.source_path} destination_path=${params.destination_path}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
                     values.put('ansiblePlaybook', ansiblePlaybook)
                     values.put('ansibleExtraArgs', ansibleExtraArgs)
                     println values
diff --git a/pipelines/upload/faqs/Jenkinsfile b/pipelines/upload/faqs/Jenkinsfile
index 4f18801b4eaa238ff59ac68c328e3119682b240f..f44c1b50204609cc34dc9f0c6652ca98223ff0aa 100644
--- a/pipelines/upload/faqs/Jenkinsfile
+++ b/pipelines/upload/faqs/Jenkinsfile
@@ -25,7 +25,7 @@ node() {
                 jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
                 currentWs = sh(returnStdout: true, script: 'pwd').trim()
                 ansiblePlaybook = "${currentWs}/ansible/uploadFAQs.yml"
-                ansibleExtraArgs = "--tags ${params.tag} --extra-vars \"upload_storage=${params.upload_storage} source_folder=${params.source_folder}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass"
+                ansibleExtraArgs = "--tags ${params.tag} --extra-vars \" source_folder=${params.source_folder}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass"
                 values.put('currentWs', currentWs)
                 values.put('env', envDir)
                 values.put('module', module)
diff --git a/pipelines/upload/portal-csv/Jenkinsfile b/pipelines/upload/portal-csv/Jenkinsfile
index 6e8453d3e274fbbf9e3f7497ac3bc43fe19fb4ff..502fadcdbbf35139a9e1cc95799ebba4649dcf46 100644
--- a/pipelines/upload/portal-csv/Jenkinsfile
+++ b/pipelines/upload/portal-csv/Jenkinsfile
@@ -27,7 +27,7 @@ node() {
                 jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
                 currentWs = sh(returnStdout: true, script: 'pwd').trim()
                 ansiblePlaybook = "${currentWs}/ansible/desktop-faq-upload.yml"
-                ansibleExtraArgs = "--extra-vars \" upload_storage=${params.upload_storage} src_file_path=${params.source_path} destination_path=${params.destination_path}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
+                ansibleExtraArgs = "--extra-vars \" src_file_path=${params.source_path} destination_path=${params.destination_path}\" --vault-password-file /var/lib/jenkins/secrets/vault-pass --tags ${params.tag}"
                 values.put('currentWs', currentWs)
                 values.put('env', envDir)
                 values.put('module', module)
diff --git a/pipelines/upload/schema/dial/Jenkinsfile b/pipelines/upload/schema/dial/Jenkinsfile
index dd74b2f23f30a03a7d9f12275ab5b3c39871802c..a91956eaf1fa1273feb6d846497d269c923f8f01 100644
--- a/pipelines/upload/schema/dial/Jenkinsfile
+++ b/pipelines/upload/schema/dial/Jenkinsfile
@@ -29,7 +29,7 @@ node() {
                 git clone https://github.com/project-sunbird/sunbird-dial-service.git -b ${params.dial_branch_or_tag}
                     """
                     ansiblePlaybook = "${currentWs}/ansible/dial_upload-schema.yml"
-                    ansibleExtraArgs = "--extra-vars \" source_name=${currentWs}/sunbird-dial-service/schemas\" --vault-password-file /var/lib/jenkins/secrets/vault-pass"  
+                    ansibleExtraArgs = "--extra-vars \" source_name=${currentWs}/sunbird-dial-service/jsonld-schema\" --vault-password-file /var/lib/jenkins/secrets/vault-pass"
                     values.put('currentWs', currentWs)
                     values.put('env', envDir)
                     values.put('module', module)
diff --git a/private_repo/ansible/inventory/dev/KnowledgePlatform/hosts b/private_repo/ansible/inventory/dev/KnowledgePlatform/hosts
index 614101f204d95bb098747323656157b2953305ef..fe21f6a81bfc3ce3b02d04cc23e498b615686544 100644
--- a/private_repo/ansible/inventory/dev/KnowledgePlatform/hosts
+++ b/private_repo/ansible/inventory/dev/KnowledgePlatform/hosts
@@ -17,7 +17,7 @@ learning1
 redis1
 
 [dial1]
-18.3.1.5
+10.0.1.5
 
 [dial:children]
 dial1
diff --git a/utils/sunbird-RC/schema/credential_template.json b/utils/sunbird-RC/schema/credential_template.json
index f96a3c0528b757ffaaeb5004c75a3578c78e8ff0..a999eca8dabee97daa8f513a301b9e7229c008bb 100644
--- a/utils/sunbird-RC/schema/credential_template.json
+++ b/utils/sunbird-RC/schema/credential_template.json
@@ -1,7 +1,8 @@
 {
     "@context": [
-    "https://{{ upstream_url }}/schema/v1_context.json",
-    "https://{{ upstream_url }}/schema/sunbird_context.json"
+    "https://www.w3.org/2018/credentials/v1",
+    "{{ upstream_url }}/schema/v1_context.json",
+    "{{ upstream_url }}/schema/sunbird_context.json"
  ],
     "type": [
         "VerifiableCredential"