diff --git a/ansible/core_kafka_setup.yml b/ansible/core_kafka_setup.yml
new file mode 100644
index 0000000000000000000000000000000000000000..c48a5c464502acc3cc802f7c3c36fa1e228de0d2
--- /dev/null
+++ b/ansible/core_kafka_setup.yml
@@ -0,0 +1,12 @@
+---
+- hosts: processing-cluster-kafka
+  become: yes
+  pre_tasks:
+    - name: Registering kafka_id
+      set_fact:
+        kafka_id: "{% for servername in play_hosts %}{% if inventory_hostname==servername %}{{ loop.index }}{% endif %}{% endfor %}"
+    - name: Print kafka_id
+      debug:
+        var: kafka_id
+  roles:
+    - setup-kafka
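For clarity, `kafka_id` resolves to the 1-based position of the current host within the play; a sketch of the evaluation, assuming a hypothetical three-node `processing-cluster-kafka` group:

```yaml
# hypothetical inventory:
#   [processing-cluster-kafka]
#   kafka-1
#   kafka-2
#   kafka-3
#
# the set_fact loop emits loop.index only where inventory_hostname
# matches the play_hosts entry, so each host ends up with:
#   kafka-1 -> kafka_id: "1"
#   kafka-2 -> kafka_id: "2"
#   kafka-3 -> kafka_id: "3"
# the setup-kafka role later gates topic creation on kafka_id == "1",
# so the kafka-topics.sh commands run on a single broker only.
```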
diff --git a/ansible/docker_image_push.yml b/ansible/docker_image_push.yml
index a9bb19b231919740d001855a3969032b3ad13000..5b0dc91f01c7efcdae2e3c5696244b76c3d0f768 100644
--- a/ansible/docker_image_push.yml
+++ b/ansible/docker_image_push.yml
@@ -15,3 +15,7 @@
       docker_image:
         name: "{{ hub_org }}/{{ image_name }}:{{ image_tag }}"
         push: yes
+
+    - file:
+        path: "/root/.docker"
+        state: absent
diff --git a/ansible/elasticsearch-api-query.yml b/ansible/elasticsearch-api-query.yml
index ae4e7648365395312801d309edf438c3551b086c..884c7fe6f92df4647562f0c76d86090d01d36fe5 100644
--- a/ansible/elasticsearch-api-query.yml
+++ b/ansible/elasticsearch-api-query.yml
@@ -1,9 +1,20 @@
-- hosts: log-es-1
+- hosts: log-es-2
   vars_files:
     - ['{{inventory_dir}}/secrets.yml']
-  tasks: 
+  become: yes
+  roles:
+    - azure-cli
+  tasks:
+    - name: install jq
+      apt:
+        name: jq
+        state: present
+    - name: Copy the elasticsearch API query shell script
+      template:
+        src: static-files/api_count_query.sh
+        dest: /tmp/
+        mode: 0775
     - name: Running the shell script to get the data from elasticsearch
-      command: ./api_count.sh "{{ mail_server_username }}" "{{ core_vault_mail_server_password }}" "{{ analytics_report_container_name }}" "{{ analytics_report_azure_account_name }}" "{{ core_vault_analytics_report_azure_account_key }}"
+      command: ./api_count_query.sh "{{ mail_server_username }}" "{{ core_vault_mail_server_password }}" "{{ analytics_report_container_name }}" "{{ analytics_report_azure_account_name }}" "{{ core_vault_analytics_report_azure_account_key }}"
       args:
-        chdir: /home/ops
-      become: yes
+        chdir: /tmp
diff --git a/ansible/inventory/env/group_vars/all.yml b/ansible/inventory/env/group_vars/all.yml
index f1dcb8fbe098a5820fe6acd1cf333405aca4b1df..a43301dfb04f2a06e324ee0dde47a7edc7969de5 100644
--- a/ansible/inventory/env/group_vars/all.yml
+++ b/ansible/inventory/env/group_vars/all.yml
@@ -658,3 +658,16 @@ kafka_urls: "{{groups['processing-cluster-kafka']|join(':9092,')}}:9092"
 kafka_topics_certificate_instruction: "{{env_name}}.coursebatch.certificate.request"
 cert_service_container_name: "{{env}}-e-credentials"
 cert_service_cloud_storage_type: "{{cert_service_cloud_storage_type}}"
+
+### Release-2.4.0 ###
+samza_tar_files_localpath: roles/samza-jobs/defaults
+job_names:
+  lms.user-account-merger_1:
+    job_file_name: 'user-account-merge'
+  lms.sso-account-updater_1:
+    job_file_name: 'sso-account-updater'
+  lms.indexer_1:
+    job_file_name: 'indexer'
+
+### api call report mailing list ###
+api_report_mailing_list: "" ## The mailing list for the daily api count report; adopters who want the report must override this in common.yml
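Because this value is spliced verbatim into the `curl` invocation in `api_count_query.sh` below, an override supplies curl form arguments rather than a bare address; a hypothetical `common.yml` override (recipient address illustrative, assuming the SendGrid v2 mail.send form fields used by the script):

```yaml
# common.yml (hypothetical) -- injected verbatim into the curl command
# line of api_count_query.sh, so it must be valid curl arguments:
api_report_mailing_list: "-F to=devops@example.org"
```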
diff --git a/ansible/roles/cert-templates/tasks/main.yml b/ansible/roles/cert-templates/tasks/main.yml
index 10e09bc5dcbefc7ec482e66709a3c303fce7d9f0..ae1f5160a17d82106dec5212c91ee0c3b7f3994d 100644
--- a/ansible/roles/cert-templates/tasks/main.yml
+++ b/ansible/roles/cert-templates/tasks/main.yml
@@ -47,3 +47,14 @@
   async: 60
   poll: 10
 
+- name: list all the files
+  shell: "ls -lR {{cert_location}}"
+  register: allfiles
+
+- debug:
+    var: allfiles
+
+- name: Remove unwanted files
+  file:
+    path: "{{cert_location}}/cert-templates"
+    state: absent
diff --git a/ansible/roles/kong-api/defaults/main.yml b/ansible/roles/kong-api/defaults/main.yml
index ce7303b9a7cf5920926337364eb2436eb79e2afc..6a6e10a69e16fa3b8b63f9607fd416b060934b0c 100644
--- a/ansible/roles/kong-api/defaults/main.yml
+++ b/ansible/roles/kong-api/defaults/main.yml
@@ -4424,17 +4424,6 @@ kong_apis:
     - {name: 'acl', config.whitelist: 'userUpdate'}
     - {name: 'rate-limiting', config.policy: "local", config.hour: "{{ medium_rate_limit_per_hour  }}"}
     - {name: 'request-size-limiting', config.allowed_payload_size: "{{ small_request_size_limit }}" }
-  - name: "certAdd"
-    request_path: "{{ user_service_prefix }}/v1/certs/add"
-    upstream_url: "{{ learning_service_url }}/private/user/v1/certs/add"
-    strip_request_path: true
-    plugins:
-    - {name: 'jwt'}
-    - {name: 'cors'}
-    - "{{ statsd_pulgin }}"
-    - {name: 'acl', config.whitelist: 'userUpdate'}
-    - {name: 'rate-limiting', config.policy: "local", config.hour: "{{ medium_rate_limit_per_hour  }}"}
-    - {name: 'request-size-limiting', config.allowed_payload_size: "{{ small_request_size_limit }}" }
   - name: "orgAssignKeys"
     request_path: "{{ org_service_prefix }}/v1/assign/key"
     upstream_url: "{{ learning_service_url }}/v1/org/assign/key"
@@ -4446,28 +4435,6 @@ kong_apis:
     - {name: 'acl', config.whitelist: 'orgAdmin'}
     - {name: 'rate-limiting', config.policy: "local", config.hour: "{{ medium_rate_limit_per_hour  }}"}
     - {name: 'request-size-limiting', config.allowed_payload_size: "{{ small_request_size_limit }}" }
-  - name: "identifierFreeUp"
-    request_path: "{{ user_service_prefix }}/v1/identifier/freeup"
-    upstream_url: "{{ learning_service_url }}/private/user/v1/identifier/freeup"
-    strip_request_path: true
-    plugins:
-    - {name: 'jwt'}
-    - {name: 'cors'}
-    - "{{ statsd_pulgin }}"
-    - {name: 'acl', config.whitelist: 'publicUser'}
-    - {name: 'rate-limiting', config.policy: "local", config.hour: "{{ medium_rate_limit_per_hour  }}"}
-    - {name: 'request-size-limiting', config.allowed_payload_size: "{{ small_request_size_limit }}" }
-  - name: "resetPassword"
-    request_path: "{{ user_service_prefix }}/v1/password/reset"
-    upstream_url: "{{ learning_service_url }}/private/user/v1/password/reset"
-    strip_request_path: true
-    plugins:
-    - {name: 'jwt'}
-    - {name: 'cors'}
-    - "{{ statsd_pulgin }}"
-    - {name: 'acl', config.whitelist: 'publicUser'}
-    - {name: 'rate-limiting', config.policy: "local", config.hour: "{{ medium_rate_limit_per_hour  }}"}
-    - {name: 'request-size-limiting', config.allowed_payload_size: "{{ small_request_size_limit }}" }
   - name: "mergeUserAccounts"
     request_path: "{{ user_service_prefix }}/v1/account/merge"
     upstream_url: "{{ learning_service_url }}/private/user/v1/account/merge"
diff --git a/ansible/roles/offline-installer/templates/setupOfflineInstaller.sh.j2 b/ansible/roles/offline-installer/templates/setupOfflineInstaller.sh.j2
index c489c40209121646d41a756069fc643cdbc86a41..df891b0887494470441757ee1bef375cb806f31f 100644
--- a/ansible/roles/offline-installer/templates/setupOfflineInstaller.sh.j2
+++ b/ansible/roles/offline-installer/templates/setupOfflineInstaller.sh.j2
@@ -8,13 +8,20 @@ n 8.16
 cd src
 npm install
 npm run dist
-npm run dist-win64
-npm run dist-linux
-# npm run dist-win32  ## For win 32 bit installation
-# Copy the build artifacts to specific folder
-#cp src/dist/DIKSHA\ Setup\ 1.0.0.exe offline_artifacts/{{build_number}}/
-#cp src/dist/DIKSHA_1.0.0_amd64.deb offline_artifacts/{{build_number}}/
 
-cp 'dist/1.0.0/win/x64/DIKSHA Setup 1.0.0.exe' ../offline_artifacts/{{build_number}}/
-cp dist/1.0.0/linux/x64/DIKSHA_1.0.0_amd64.deb ../offline_artifacts/{{build_number}}/
-#cp 'src/dist/1.0.0/win/x32/DIKSHA Setup 1.0.0.exe' 'offline_artifacts/{{build_number}}//DIKSHA Setup 1.0.0.32.exe'
+if [ "{{offline_installer_type}}" == "windows32" ];
+then
+	npm run dist-win32
+        ls -lR
+	cp 'dist/1.0.0/win/ia32/DIKSHA Setup 1.0.0.exe' ../offline_artifacts/{{build_number}}/
+elif [ "{{offline_installer_type}}" == "windows64" ];
+then
+	npm run dist-win64
+        ls -lR
+        cp 'dist/1.0.0/win/x64/DIKSHA Setup 1.0.0.exe' ../offline_artifacts/{{build_number}}/
+elif [ "{{offline_installer_type}}" == "debian" ];
+then
+	npm run dist-linux
+        ls -lR
+        cp dist/1.0.0/linux/x64/DIKSHA_1.0.0_amd64.deb ../offline_artifacts/{{build_number}}/
+fi
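For reference, the branching above recognizes exactly three `offline_installer_type` values (supplied as an extra var by the offline-installer Jenkinsfile further down); any other value now builds nothing:

```yaml
# accepted values for the extra var consumed by setupOfflineInstaller.sh.j2
offline_installer_type: windows64   # or: windows32 | debian
```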
diff --git a/ansible/roles/samza-jobs/defaults/main.yml b/ansible/roles/samza-jobs/defaults/main.yml
index bde0b20f953d4fa440a9e8661091e87e4bb5255d..0adcc6cd9f8ce380db7604bd7f5f151b4bf23260 100644
--- a/ansible/roles/samza-jobs/defaults/main.yml
+++ b/ansible/roles/samza-jobs/defaults/main.yml
@@ -6,5 +6,11 @@ hadoop_version: 2.7.2
 __yarn_port__: 8000
 cassandra_port: 9042
 es_port: 9200
-
-#telemetry_extractor_container_memory_mb: 1024
+samza_tar_files_localpath: roles/samza-jobs/defaults
+job_names:
+  lms.user-account-merger_1:
+    job_file_name: 'user-account-merge'
+  lms.sso-account-updater_1:
+    job_file_name: 'sso-account-updater'
+  lms.indexer_1:
+    job_file_name: 'indexer'
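These keys are what `job_names_to_kill` (populated from the `job_names_to_deploy` Jenkins parameter) must match; a sketch of how one entry resolves through the lookups in `samza_deploy.yml` and `tasks/main.yml`, assuming `job_names_to_kill=lms.indexer_1`:

```yaml
# job_names_to_kill.split(',')       -> ['lms.indexer_1']
# job_names[item].job_file_name      -> 'indexer'
# find patterns                      -> 'indexer*'
# i.e. only tarballs whose filenames start with 'indexer' are cleaned
# up and redeployed; unrelated jobs on the cluster are left untouched.
```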
diff --git a/ansible/roles/samza-jobs/files/find_job_name.sh b/ansible/roles/samza-jobs/files/find_job_name.sh
deleted file mode 100644
index 05f06052239d66020f7ea30ee92ff3201805aa2c..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/find_job_name.sh
+++ /dev/null
@@ -1 +0,0 @@
-sed -n "/job\.name.*$/ p" $1 | sed -n "s/=/\\t/g p" | cut -f 2
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/files/get_all_job_name.sh b/ansible/roles/samza-jobs/files/get_all_job_name.sh
deleted file mode 100644
index 7975c8a34a8c5d3c7a72df8be218e945ad4be47d..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/get_all_job_name.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-find . -name "*.properties" | while read fname; do
-  job_name=`sed -n "/^job\.name.*$/ p" $fname | sed -n "s/=/\\t/g p" | cut -f 2`
-  folder_path=$(dirname `dirname "$fname"`)
-  folder_name=`basename $folder_path`
-  echo "$folder_name:$job_name:---:stopped"
-done > $1
diff --git a/ansible/roles/samza-jobs/files/get_all_running_app_id.sh b/ansible/roles/samza-jobs/files/get_all_running_app_id.sh
deleted file mode 100644
index 74aa7c049123dc11ecb7572a22aef2c30f5db088..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/get_all_running_app_id.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-./yarn application -list | cut -f 2 | sed 1,'/Application-Name/'d
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/files/get_all_running_app_name.sh b/ansible/roles/samza-jobs/files/get_all_running_app_name.sh
deleted file mode 100644
index b3b1b9dff2010a1073ca96482db8505e6ededf5a..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/get_all_running_app_name.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-job_names=(`./yarn application -list | cut -f 2 | sed 1,'/Application-Name/'d | sed 's/_1$//'`)
-job_ids=(`./yarn application -list | cut -f 1 | sed 1,'/Application-Id/'d`)
-count=${#job_names[@]}
-for (( i=0; i<${count}; i++ ));
-do
-	job_name=${job_names[i]}
-	job_id=${job_ids[i]}
-	`sed -i /$job_name/s/stopped/started/g $1`
-	`sed -i /$job_name/s/---/$job_id/g $1`
-done
diff --git a/ansible/roles/samza-jobs/files/kill_all_app.sh b/ansible/roles/samza-jobs/files/kill_all_app.sh
deleted file mode 100644
index 55f7341e25ea3350113c398574182c4ad351a0cb..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/kill_all_app.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env bash
-./yarn application -list > applist.txt
-sed -n "/$1.*$/ p" applist.txt | cut -f 1 > temp.txt
-while read in;
-do
-./yarn application -kill  "$in";
-done < temp.txt
-rm temp.txt
-rm applist.txt
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/files/kill_jobs.sh b/ansible/roles/samza-jobs/files/kill_jobs.sh
deleted file mode 100644
index 267515cdea2a7761b542db764418868145d59a71..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/kill_jobs.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-cat $1 | while read LINE
-do
- application_id=`echo $LINE | awk -F':' '{print $3}'`;
- status=`echo $LINE | awk -F':' '{print $4}'`;
- 
- if [ "$status" == "restart" ]
- then
-  ./yarn application -kill $application_id
- fi
-done
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/files/remove_old_tar.sh b/ansible/roles/samza-jobs/files/remove_old_tar.sh
deleted file mode 100644
index 13d0547b89be9c75b3f076139ea522137489fc8b..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/remove_old_tar.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-cat $1 | awk -F':' '{print $1}' > tmp.txt
-DIRS=`ls -l $2/extract/ | egrep '^d'| awk '{print $9}'`
-for dir in $DIRS
-do
-  if ! grep -Fxq $dir tmp.txt
-  then
-     rm -rf $dir
-     rm $2/$dir
-  fi
-done
-rm tmp.txt
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/files/start_jobs.sh b/ansible/roles/samza-jobs/files/start_jobs.sh
deleted file mode 100644
index 4d048a58a8000b62b27b89202898f6d2f6e15525..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/start_jobs.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-folder_path=$2
-cat $1 | while read LINE
-do
- dir_name=`echo $LINE | awk -F':' '{print $1}'`;
- job_name=`echo $LINE | awk -F':' '{print $2}'`;
- application_id=`echo $LINE | awk -F':' '{print $3}'`;
- status=`echo $LINE | awk -F':' '{print $4}'`;
- properties_path="$folder_path/$dir_name/config/*.properties"
- config_file_path=`ls -d $properties_path`
- if [ "$status" == "stopped" ] || [ "$status" == "restart" ]
- then
-   ./$dir_name/bin/run-job.sh --config-factory=org.apache.samza.config.factories.PropertiesConfigFactory --config-path=file:///$config_file_path
- fi
-done
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/files/update_new_job_name.sh b/ansible/roles/samza-jobs/files/update_new_job_name.sh
deleted file mode 100644
index 24e174ce540866aafd3ef3e88c66b3bc50d983a8..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/files/update_new_job_name.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-find $2 -name "*.properties" | while read fname; do
-  job_name=`sed -n "/^job\.name.*$/ p" $fname | sed -n "s/=/\\t/g p" | cut -f 2`
-  folder_path=$(dirname `dirname "$fname"`)
-  folder_name=`basename $folder_path`
-  if grep -Fwq $job_name $1
-  	then
-      `sed -i /$job_name/s/^.*\.gz/$folder_name/ $1`;
-      `sed -i /$job_name/s/started/restart/ $1`;
-  	else
-      echo "adding"
-    	echo "$folder_name:$job_name:---:stopped" >> $1
-  fi
-done
\ No newline at end of file
diff --git a/ansible/roles/samza-jobs/tasks/deploy.yml b/ansible/roles/samza-jobs/tasks/deploy.yml
deleted file mode 100644
index 67c51a8b42b5eb28578edece68993d830f57433f..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/tasks/deploy.yml
+++ /dev/null
@@ -1,101 +0,0 @@
----
-- name: Create Directory for Jobs
-  file: path={{item}} owner=hduser group=hadoop recurse=yes state=directory
-  with_items:
-    - "{{samza_jobs_dir}}"
-    - "{{samza_jobs_dir}}/extract"
-
-- name: Copy script to get all running jobs
-  copy: src=get_all_running_app_name.sh dest=/usr/local/hadoop/bin owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"
-
-- name: Copy script to get all job names
-  copy: src=get_all_job_name.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"
-
-- name: Copy script to get updated job names from extracted tar
-  copy: src=update_new_job_name.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"
-
-- name: Copy script to start jobs based on the status
-  copy: src=start_jobs.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"
-
-- name: Copy script to remove old job tar
-  copy: src=remove_old_tar.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"
-
-- name: Copy script to kill jobs based on the status
-  copy: src=kill_jobs.sh dest=/usr/local/hadoop/bin owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"
-
-- name: Remove file of job status
-  file: path="{{job_status_file}}" state=absent
-
-- name: Get job names from folder
-  command: bash -lc "./get_all_job_name.sh {{job_status_file}}"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract"
-
-- name: Ensure yarn resource manager is running
-  command: bash -lc "(ps aux | grep yarn-hduser-resourcemanager | grep -v grep) || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{hadoop_version}}/conf/ start resourcemanager"
-  become: yes
-  become_user: hduser
-
-- name: Update status of running job in file
-  command: bash -lc "./get_all_running_app_name.sh {{job_status_file}}"
-  args:
-    chdir: /usr/local/hadoop/bin
-
-- name: copy new jobs tar ball
-  copy: src={{ item }} dest={{samza_jobs_dir}}/ force=no owner=hduser group=hadoop
-  with_fileglob:
-    - ./jobs/*
-  register: new_jobs
-
-- name: Create Directory to extract new jobs
-  file: path={{samza_jobs_dir}}/extract/{{item.item | basename }} owner=hduser group=hadoop recurse=yes state=directory
-  register: extract_dir
-  when: "{{item|changed}}"
-  with_items: "{{ (new_jobs|default({})).results|default([]) }}"
-
-- name: extract new jobs
-  command: tar -xvf "{{samza_jobs_dir}}/{{item.item | basename}}" -C "{{samza_jobs_dir}}/extract/{{item.item | basename }}"
-  when: "{{item|changed}}"
-  with_items: "{{ (new_jobs|default({})).results|default([]) }}"
-
-- name: Create Directory to extract new jobs
-  file: path={{samza_jobs_dir}}/extract/ owner=hduser group=hadoop recurse=yes
-
-- name: Get all new job configs
-  shell: "ls -d -1 {{item.path}}/config/*.properties"
-  register: config_files
-  when: "{{item|changed}}"
-  with_items: "{{ (extract_dir|default({})).results|default([]) }}"
-
-
-- name: update environment specific details in new job configs
-  replace: dest="{{item[1].stdout}}" regexp="{{item[0].key}}" replace="{{item[0].value}}"
-  when: "{{item[1]|changed}}"
-  with_nested:
-    - [{key: "__yarn_host__", value: "{{__yarn_host__}}"}, {key: "__yarn_port__", value: "{{__yarn_port__}}"}, {key: "__env__", value: "{{env_name}}" }, {key: "__zookeepers__", value: "{{zookeepers}}"}, {key: "__kafka_brokers__", value: "{{kafka_brokers}}"}, {key: "__lms_host__", value: "{{__lms_host__}}"}, {key: "__lms_es_port__", value: "{{sunbird_es_port}}"}, {key: "__lms_es_host__", value: "{{sunbird_es_host}}"}]
-    - "{{ (config_files|default({})).results|default([]) }}"
-
-- name: Update status of new jobs in file
-  command: bash -lc "./update_new_job_name.sh {{job_status_file}} {{samza_jobs_dir}}/extract/{{item.item | basename}}"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract/"
-  when: "{{item|changed}}"
-  with_items: "{{ (new_jobs|default({})).results|default([]) }}"
-
-- name: Kill jobs
-  command: bash -lc "./kill_jobs.sh {{job_status_file}}"
-  args:
-    chdir: /usr/local/hadoop/bin
-
-- name: Start jobs
-  command: bash -lc "./start_jobs.sh {{job_status_file}} {{samza_jobs_dir}}/extract"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract/"
-  become_user: hduser
-
-- name: Remove all old tar
-  command: bash -lc "./remove_old_tar.sh {{job_status_file}} {{samza_jobs_dir}}"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract/"
-
-- file: path={{samza_jobs_dir}} owner=hduser group=hadoop state=directory recurse=yes
diff --git a/ansible/roles/samza-jobs/tasks/main.yml b/ansible/roles/samza-jobs/tasks/main.yml
index 0feb5dcd994a6828f46059024d5a3642a7ccf1a5..6352fe84f6875b6b437817af3147dc07bfe89b12 100644
--- a/ansible/roles/samza-jobs/tasks/main.yml
+++ b/ansible/roles/samza-jobs/tasks/main.yml
@@ -1,9 +1,73 @@
----
-- include: deploy.yml
-  when: deploy_jobs | default(false)
+ - name: Create Directory for Jobs
+   file: path={{ item }} owner=hduser group=hadoop recurse=yes state=directory
+   with_items:
+     - "{{ samza_jobs_dir }}"
+     - "{{ samza_jobs_dir }}/extract"
 
-- include: stop_jobs.yml
-  when: stop_jobs | default(false)
+ - name: Get the application id to kill the app
+   shell: "{{ yarn_path }}/yarn application --list | grep -i {{ item }} | awk '{print $1}'"
+   with_items: "{{ job_names_to_kill.split(',')|list }}"
+   register: appid
 
-- include: start_jobs.yml
-  when: start_jobs | default(false)
+ - name: Kill the mentioned applications
+   shell: "{{ yarn_path }}/yarn application -kill {{ item.stdout }}"
+   with_items:
+     - "{{ appid['results'] }}"
+   when: item.stdout | length > 0
+
+ - name: find the existing file names to remove
+   find:
+     paths: "{{ samza_jobs_dir }}"
+     patterns: "{{ job_names[item].job_file_name }}*"
+     recurse: yes
+   with_items: "{{ job_names_to_kill.split(',') }}"
+   register: existing_files
+
+ - name: remove the files under "{{ samza_jobs_dir }}" directory
+   command: rm -rf "{{ item.path | basename }}"
+   with_items: "{{ existing_files | json_query('results[].files[]') }}"
+   args:
+     chdir: "{{ samza_jobs_dir }}"
+
+ - name: remove the files under "{{ samza_jobs_dir }}/extract" directory
+   command: rm -rf "{{ item.path | basename }}"
+   with_items: "{{ existing_files | json_query('results[].files[]') }}"
+   args:
+     chdir: "{{ samza_jobs_dir }}/extract"
+
+ - name: copy new jobs tar ball
+   copy: src={{ item }} dest={{ samza_jobs_dir }}/ force=no owner=hduser group=hadoop
+   with_fileglob:
+     - ../defaults/jobs/*
+   register: new_jobs
+
+ - name: Create directory to extract new jobs
+   file: path="{{ samza_jobs_dir }}/extract/{{ item }}" owner=hduser group=hadoop recurse=yes state=directory
+   with_items:
+     - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"
+
+ - name: extract new jobs
+   unarchive:
+     src: "{{ samza_jobs_dir }}/{{ item }}"
+     dest: "{{ samza_jobs_dir }}/extract/{{ item }}"
+     remote_src: yes
+   with_items:
+     - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"
+
+ - name: Get all new jobs config
+   shell: "ls -d -1 {{ samza_jobs_dir }}/extract/{{ item }}/config/*.properties"
+   register: config_files
+   with_items:
+     - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"
+
+ - name: update environment specific details in new job configs
+   replace: dest="{{ item[1].stdout }}" regexp="{{ item[0].key }}" replace="{{ item[0].value }}"
+   with_nested:
+     - [{key: "__yarn_host__", value: "{{__yarn_host__}}"}, {key: "__yarn_port__", value: "{{__yarn_port__}}"}, {key: "__env__", value: "{{env_name}}" }, {key: "__zookeepers__", value: "{{zookeepers}}"}, {key: "__kafka_brokers__", value: "{{kafka_brokers}}"}, {key: "__lms_host__", value: "{{__lms_host__}}"}, {key: "__lms_es_port__", value: "{{sunbird_es_port}}"}, {key: "__lms_es_host__", value: "{{sunbird_es_host}}"}]
+     - "{{ config_files | json_query('results[]') }}"
+
+ - name: Start the jobs
+   shell: "{{ samza_jobs_dir }}/extract/{{ item.0 }}/bin/run-job.sh --config-factory=org.apache.samza.config.factories.PropertiesConfigFactory --config-path={{ item.1.stdout }}"
+   with_together:
+     - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"
+     - "{{ config_files | json_query('results[]') }}"
diff --git a/ansible/roles/samza-jobs/tasks/start_jobs.yml b/ansible/roles/samza-jobs/tasks/start_jobs.yml
deleted file mode 100644
index 4bb0c65c9c58bd0667eaa3cb593fe692076b522a..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/tasks/start_jobs.yml
+++ /dev/null
@@ -1,21 +0,0 @@
----
-- name: Remove file of job status
-  file: path="{{job_status_file}}" state=absent
-  become: yes
-
-- name: Get job names from folder
-  command: bash -lc "./get_all_job_name.sh {{job_status_file}}"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract"
-  become: yes
-
-- name: Ensure yarn resource manager is running
-  command: bash -lc "(ps aux | grep yarn-hduser-resourcemanager | grep -v grep) || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{hadoop_version}}/conf/ start resourcemanager"
-  become: yes
-  become_user: hduser
-
-- name: Start jobs
-  command: bash -lc "./start_jobs.sh {{job_status_file}} {{samza_jobs_dir}}/extract"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract/"
-  become: yes
diff --git a/ansible/roles/samza-jobs/tasks/stop_jobs.yml b/ansible/roles/samza-jobs/tasks/stop_jobs.yml
deleted file mode 100644
index 1ef2f7b748366687005502f192bdc0092b75d08a..0000000000000000000000000000000000000000
--- a/ansible/roles/samza-jobs/tasks/stop_jobs.yml
+++ /dev/null
@@ -1,16 +0,0 @@
----
-- name: Remove file of job status
-  file: path="{{job_status_file}}" state=absent
-  become: yes
-
-- name: Get job names from folder
-  command: bash -lc "./get_all_job_name.sh {{job_status_file}}"
-  args:
-    chdir: "{{samza_jobs_dir}}/extract"
-  become: yes
-
-- name: Kill jobs
-  command: bash -lc "./kill_jobs.sh {{job_status_file}}"
-  args:
-    chdir: /usr/local/hadoop/bin
-  become: yes
diff --git a/ansible/roles/setup-kafka/defaults/main.yml b/ansible/roles/setup-kafka/defaults/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..1a2fb50cd595353f02d2d98accdda7a313648988
--- /dev/null
+++ b/ansible/roles/setup-kafka/defaults/main.yml
@@ -0,0 +1,23 @@
+env: dev
+
+processing_kafka_overriden_topics:
+  - name: lms.audit.events
+    retention_time: 172800000
+    replication_factor: 1
+  - name: lms.sso.events
+    retention_time: 172800000
+    replication_factor: 1
+  - name: lms.user.account.merge
+    retention_time: 172800000
+    replication_factor: 1
+
+processing_kafka_topics:
+  - name: lms.audit.events
+    num_of_partitions: 1
+    replication_factor: 1
+  - name: lms.sso.events
+    num_of_partitions: 4
+    replication_factor: 1
+  - name: lms.user.account.merge
+    num_of_partitions: 1
+    replication_factor: 1
diff --git a/ansible/roles/setup-kafka/tasks/main.yml b/ansible/roles/setup-kafka/tasks/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..b5811b969b2bfc4a33bda7409c22e3240ea1b985
--- /dev/null
+++ b/ansible/roles/setup-kafka/tasks/main.yml
@@ -0,0 +1,14 @@
+- name: create topics
+  command: /opt/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic {{env_name}}.{{item.name}} --partitions {{ item.num_of_partitions }} --replication-factor {{ item.replication_factor }}
+  with_items: "{{processing_kafka_topics}}"
+  ignore_errors: true
+  when: kafka_id=="1"
+  tags:
+    - processing-kafka
+
+- name: override retention time
+  command: /opt/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --alter --topic {{env_name}}.{{item.name}} --config retention.ms={{ item.retention_time }}
+  with_items: "{{processing_kafka_overriden_topics}}"
+  when: kafka_id=="1" and item.retention_time is defined
+  tags:
+    - processing-kafka
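`kafka-topics.sh --create` exits non-zero when a topic already exists, which is why the create task above leans on `ignore_errors`; if the installed Kafka is 0.10.1 or newer, a stricter sketch using the `--if-not-exists` flag would avoid masking real failures:

```yaml
# alternative sketch, assuming Kafka >= 0.10.1 where --if-not-exists
# makes --create a no-op for topics that already exist
- name: create topics only when absent
  command: /opt/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --create --if-not-exists --topic {{env_name}}.{{item.name}} --partitions {{ item.num_of_partitions }} --replication-factor {{ item.replication_factor }}
  with_items: "{{processing_kafka_topics}}"
  when: kafka_id=="1"
  tags:
    - processing-kafka
```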
diff --git a/ansible/roles/stack-proxy/templates/proxy-default.conf b/ansible/roles/stack-proxy/templates/proxy-default.conf
index 11810fec9fb7a7a65f643f0bb7adcc794f631257..3096693ca511754b2813bc8168f0e5f2f0cc54ca 100644
--- a/ansible/roles/stack-proxy/templates/proxy-default.conf
+++ b/ansible/roles/stack-proxy/templates/proxy-default.conf
@@ -135,6 +135,11 @@ server {
     proxy_send_timeout 60;
     proxy_read_timeout 70;
     proxy_http_version 1.1;
+    header_filter_by_lua_block {
+      local h = ngx.req.get_headers()
+
+      ngx.log(ngx.WARN, "Deviceid: ", h["x-device-id"], "  Channelid: ", h["x-channel-id"], "  Appid: ", h["x-app-id"])
+    }
     proxy_pass http://kong;
   }
 
diff --git a/ansible/samza_deploy.yml b/ansible/samza_deploy.yml
index de923114dcd39d7fd32e23f1b6c36d082c3fdf10..9ac34de840b0111376dbb318ee330694a702eb15 100644
--- a/ansible/samza_deploy.yml
+++ b/ansible/samza_deploy.yml
@@ -1,14 +1,34 @@
 ---
-- name: "Start Nodemanager on Slaves"
+- name: Move the selected samza app tar files to another dir
+  hosts: localhost
+  tasks:
+    - name: find the selected samza app tar files path
+      find:
+        paths: "{{job_workspace}}/{{ samza_tar_files_localpath }}/allfiles"
+        patterns: "{{ job_names[item].job_file_name }}*"
+        recurse: yes
+      with_items: "{{ job_names_to_kill.split(',') }}"
+      register: existing_files
+
+    - name: Copy the selected samza app tar files to jobs folder
+      copy:
+        src: "{{ item }}"
+        dest: "{{ job_workspace }}/{{ samza_tar_files_localpath }}/jobs"
+      with_items:
+         - "{{ existing_files | json_query('results[].files[].path') }}"
+
+- name: "Start Nodemanager on Slaves if stopped"
   hosts: "yarn-slave"
   vars:
     hadoop_version: 2.7.2
   become: yes
-  tasks:
+  pre_tasks:
     - name: Ensure yarn nodemanager is running
       become_user: hduser
       shell: |
-         (ps aux | grep yarn-hduser-nodemanager | grep -v grep) || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{hadoop_version}}/conf/ start nodemanager || sleep 10
+         (ps aux | grep yarn-hduser-nodemanager | grep -v grep) \
+         || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{ hadoop_version }}/conf/ start nodemanager \
+         || sleep 10
 
     - name: install imagemagick
       apt: name=imagemagick state=present update_cache=yes
@@ -16,9 +36,13 @@
 - name: "Deploy Samza jobs"
   hosts: "yarn-master"
   become: yes
+  become_user: hduser
   vars_files:
-    - "{{inventory_dir}}/secrets.yml"
-  vars:
-    deploy_jobs: true
+    - "{{ inventory_dir }}/secrets.yml"
+  pre_tasks:
+    - name: Ensure yarn resource manager is running
+      shell: |
+        (ps aux | grep yarn-hduser-resourcemanager | grep -v grep) \
+        || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{ hadoop_version }}/conf/ start resourcemanager
   roles:
     - samza-jobs
diff --git a/ansible/static-files/api_count_query.sh b/ansible/static-files/api_count_query.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ce22a25494cfa6a07d5ce524bfd1d4073b5c176e
--- /dev/null
+++ b/ansible/static-files/api_count_query.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+
+###-----------------------------------------------------###
+# Author:: Kaliraja
+# Description:: This script queries the api call data
+# from log-es, mails it out as a report, and uploads
+# the same to azure storage.
+###-----------------------------------------------------###
+
+
+#date variables
+prev_day=`date "+%s" -d "yesterday 03:30:00"`
+today=`date "+%s" -d "today 03:30:00"`
+date=`date +"%m-%d-%Y"`
+
+#prev_day=`date "+%s" -d "yesterday -7 day 03:30:00"`
+#today=`date "+%s" -d "yesterday -6 day 03:30:00"`
+#date=`date +%m-%d-%y --date="yesterday -6 day" | sed 's/19/2019/'`
+
+#api variables
+contentsearch="/api/composite/v1/search"
+contentread="/api/content/v1/read/"
+telemetry="/api/data/v1/telemetry"
+registermobile="/api/api-manager/v1/consumer/mobile_device/credential/register"
+
+#filename variables
+contentsearch_filename=contentsearch-$date.txt
+contentread_filename=contentread-$date.txt
+telemetry_filename=telemetry-$date.txt
+mobiledevice_registerfilename=registermobile-$date.txt
+
+#sendgrid and azure storage variables
+sguser="$1"
+sgpass="$2"
+container_name="$3"
+account_name="$4"
+storage_key="$5"
+
+
+query(){
+    curl -H 'Content-Type:application/json' -s -XPOST 'localhost:9200/logstash-*/_search?pretty' -d '{"query":{"bool":{"must":{"query_string":{"analyze_wildcard":true,"query":"\"'$1'\""}},"filter":{"bool":{"must":[{"range":{"@timestamp":{"gte":"'"$prev_day"'","lte":"'"$today"'","format":"epoch_second"}}}],"must_not":[]}}}},"size":0,"aggs":{"2":{"date_histogram":{"field":"@timestamp","interval":"15m","time_zone":"Asia/Kolkata","min_doc_count":1,"extended_bounds":{"min": 0,"max": 500}}}}}' |  jq -r '.aggregations."2".buckets[]|.key_as_string+" "+ (.doc_count|tostring)' | column -t > $2
+}
+
+#Executing content search query
+
+query $contentsearch $contentsearch_filename
+
+#Executing the contentread query
+
+query $contentread $contentread_filename
+
+#Executing the telemetry query
+
+query $telemetry $telemetry_filename
+
+#Executing the registermobiledevice query
+
+query $registermobile $mobiledevice_registerfilename
+
+#sending an email with an attachment
+
+curl https://api.sendgrid.com/api/mail.send.json \
+ {{ api_report_mailing_list }} -F subject="Data for Diksha api calls" \
+ -F text="Data" --form-string html="<strong>Hi Team, PFA.</strong>" \
+ -F from=reports@diksha.in -F api_user="$sguser" -F api_key="$sgpass" \
+ -F files\[contentsearch.txt\]=@contentsearch-$date.txt -F files\[contentread.txt\]=@contentread-$date.txt -F files\[telemetry.txt]=@telemetry-$date.txt -F files\[registermobile.txt]=@registermobile-$date.txt
+
+
+# uploading the reports to azure storage
+
+az storage blob upload \
+--container-name $container_name \
+--file contentsearch-$date.txt \
+--name contentsearch-$date.txt  \
+--account-name $account_name  \
+--account-key $storage_key
+
+az storage blob upload \
+--container-name $container_name \
+--file contentread-$date.txt \
+--name contentread-$date.txt  \
+--account-name $account_name  \
+--account-key $storage_key
+
+
+az storage blob upload \
+--container-name $container_name \
+--file telemetry-$date.txt \
+--name telemetry-$date.txt  \
+--account-name $account_name  \
+--account-key $storage_key
+
+az storage blob upload \
+--container-name $container_name \
+--file registermobile-$date.txt \
+--name registermobile-$date.txt  \
+--account-name $account_name  \
+--account-key $storage_key
+
+
+# deleting files
+
+rm *-$date.txt
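The playbook change above runs this script once per deploy; if the daily report is meant to run unattended, a hypothetical cron entry (schedule and paths illustrative, and note the credentials would land in the crontab) could be added from the same play:

```yaml
# illustrative sketch only -- reuses the arguments already passed to the
# script by elasticsearch-api-query.yml
- name: schedule the daily api count report
  cron:
    name: "api-count-report"
    minute: "30"
    hour: "4"
    user: root
    job: "cd /tmp && ./api_count_query.sh '{{ mail_server_username }}' '{{ core_vault_mail_server_password }}' '{{ analytics_report_container_name }}' '{{ analytics_report_azure_account_name }}' '{{ core_vault_analytics_report_azure_account_key }}'"
```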
diff --git a/pipelines/deploy/kafka/Jenkinsfile b/pipelines/deploy/kafka/Jenkinsfile
new file mode 100644
index 0000000000000000000000000000000000000000..7a99d974533e888e5d6fa847b4ff79fabd69e947
--- /dev/null
+++ b/pipelines/deploy/kafka/Jenkinsfile
@@ -0,0 +1,38 @@
+@Library('deploy-conf') _
+node() {
+    try {
+        String ANSI_GREEN = "\u001B[32m"
+        String ANSI_NORMAL = "\u001B[0m"
+        String ANSI_BOLD = "\u001B[1m"
+        String ANSI_RED = "\u001B[31m"
+        String ANSI_YELLOW = "\u001B[33m"
+
+        stage('checkout public repo') {
+            cleanWs()
+            checkout scm
+        }
+
+        ansiColor('xterm') {
+            stage('deploy'){
+                values = [:]
+                envDir = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-3].trim()
+                module = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-2].trim()
+                jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
+                currentWs = sh(returnStdout: true, script: 'pwd').trim()
+                ansiblePlaybook = "${currentWs}/ansible/core_kafka_setup.yml"
+                ansibleExtraArgs = "--vault-password-file /var/lib/jenkins/secrets/vault-pass"
+                values.put('currentWs', currentWs)
+                values.put('env', envDir)
+                values.put('module', module)
+                values.put('jobName', jobName)
+                values.put('ansiblePlaybook', ansiblePlaybook)
+                values.put('ansibleExtraArgs', ansibleExtraArgs)
+                println values
+                ansible_playbook_run(values)
+            }
+        }
+    }
+    catch (err) {
+        throw err
+    }
+}
diff --git a/pipelines/deploy/yarn/Jenkinsfile b/pipelines/deploy/yarn/Jenkinsfile
index 1a74d8825c44e8faf5e69054b3883165cff27672..fa0749ff12371f17c1c64c5cf61f3b729c40b2b0 100644
--- a/pipelines/deploy/yarn/Jenkinsfile
+++ b/pipelines/deploy/yarn/Jenkinsfile
@@ -24,13 +24,14 @@ node() {
                 stage('deploy artifact'){
                     sh """
                        unzip ${artifact}
-                       mv distribution-* ansible
-                       rm -rf ansible/roles/samza-jobs/files/jobs
-                       mkdir  ansible/roles/samza-jobs/files/jobs
-                       tar -xvf ansible/distribution-* -C ansible/roles/samza-jobs/files/jobs/
+                       mv distribution-*.tar.gz ansible
+                       rm -rf ansible/roles/samza-jobs/defaults/jobs
+                       mkdir  ansible/roles/samza-jobs/defaults/jobs ansible/roles/samza-jobs/defaults/allfiles
+                       chmod 777 ansible/roles/samza-jobs/defaults/jobs
+                       tar -xvf ansible/distribution-*.tar.gz -C ansible/roles/samza-jobs/defaults/allfiles/
                     """
                     ansiblePlaybook = "${currentWs}/ansible/samza_deploy.yml"
-                    ansibleExtraArgs = "--vault-password-file /var/lib/jenkins/secrets/vault-pass"
+                    ansibleExtraArgs = "--extra-vars \"job_names_to_kill=${params.job_names_to_deploy} job_workspace=${WORKSPACE}/ansible\" --vault-password-file /var/lib/jenkins/secrets/vault-pass"
                     values.put('ansiblePlaybook', ansiblePlaybook)
                     values.put('ansibleExtraArgs', ansibleExtraArgs)
                     println values
@@ -52,3 +53,4 @@ node() {
         email_notify()
     }
 }
+
diff --git a/pipelines/offlineinstaller/Jenkinsfile b/pipelines/offlineinstaller/Jenkinsfile
index 8759afa424b7bfbd04f4a71d1d0ab263f5ca7a3e..92edb28068a89319442ca62a4cb0368976d159b1 100644
--- a/pipelines/offlineinstaller/Jenkinsfile
+++ b/pipelines/offlineinstaller/Jenkinsfile
@@ -23,7 +23,7 @@ node() {
                     stage('Pre requisites for installer') {
 			values = [:]
                     	ansiblePlaybook = "${currentWs}/ansible/offline-installer.yml --vault-password-file /var/lib/jenkins/secrets/vault-pass "
-		    	ansibleExtraArgs = " --extra-vars \"offline_repo_location=$currentWs build_number=$BUILD_NUMBER\" -vv"
+		    	ansibleExtraArgs = " --extra-vars \"offline_repo_location=$currentWs build_number=$BUILD_NUMBER offline_installer_type=${offline_installer_type}\" -vv"
 		        values.put('currentWs', currentWs)
 		        values.put('env', envDir)
                 	values.put('module', module)
@@ -34,7 +34,7 @@ node() {
                     	ansible_playbook_run(values)
                 }
 		try {
-		     stage('Build, create and upload installer to azure') {
+		     stage('Build Installer, create and upload it to azure') {
 			dir('offline-installer-repo') {
 			    sh """
                                bash -x build.sh
@@ -42,7 +42,7 @@ node() {
 			}
 			values = [:]
                         ansiblePlaybook = "${currentWs}/ansible/offline-installer.yml --vault-password-file /var/lib/jenkins/secrets/vault-pass "
-                        ansibleExtraArgs = " --extra-vars \"offline_repo_location=$currentWs build_number=$BUILD_NUMBER uploadInstaller=True \""
+                        ansibleExtraArgs = " --extra-vars \"offline_repo_location=$currentWs build_number=$BUILD_NUMBER uploadInstaller=True offline_installer_type=${offline_installer_type}\""
                         values.put('currentWs', currentWs)
                         values.put('env', envDir)
                         values.put('module', module)
@@ -56,7 +56,7 @@ node() {
 		catch (err) {
 			values = [:]
                         ansiblePlaybook = "${currentWs}/ansible/offline-installer.yml --vault-password-file /var/lib/jenkins/secrets/vault-pass "
-                        ansibleExtraArgs = " --extra-vars \"offline_repo_location=$currentWs removeOfflineInstallerFolder=True build_number=$BUILD_NUMBER\""
+                        ansibleExtraArgs = " --extra-vars \"offline_repo_location=$currentWs removeOfflineInstallerFolder=True build_number=$BUILD_NUMBER offline_installer_type=${offline_installer_type}\""
                         values.put('currentWs', currentWs)
                         values.put('env', envDir)
                         values.put('module', module)