Unverified Commit 4e6b8aa7 authored by G33tha's avatar G33tha Committed by GitHub
Browse files

Merge pull request #787 from harshavardhanc/release-2.4.0

Issue #00 feat: samza deploy role and jenkinsfile
parents f53d5d22 d8ed62a8
master login-changes loginRegisterBtn prasath-release-5.1.0 release-5.1.0 release-5.1.0-api release-5.1.0-apiuat release-5.1.0-bulk-enrol-unenrol release-5.1.0-uat release-5.1.0-uatLatest release-5.1.0-uatapi release-5.1.0-upsmf-prod release-uat-nginx revert-5-login-changes upstream/release-5.1.0-vdn 3nodeinstall 3node-prior-rebase-2-5 3node-2.6.0 3node 3node_bak_new temp-ratelimit_RC1 tag-for-backups tag-for-backups_RC1 tag-2.4.0 service-rename-player-2.5.0_RC9 service-rename-kong-2.5.0_RC4 revert-incorrect revert-942-release-2.6.0 revert-872-SC-1392-ftl-fix release-dock-0.0.3_RC2 release-dock-0.0.3_RC1 release-5.2.0-inquiry_RC1 release-5.1.0_RC3 release-5.1.0_RC2 release-5.1.0_RC1 release-5.0.2 release-5.0.2_RC1 release-5.0.1 release-5.0.1_RC1 release-5.0.0 release-5.0.0_RC5 release-5.0.0_RC4 release-5.0.0_RC3 release-5.0.0_RC2 release-5.0.0_RC1 release-5.0.0-vdn_RC1 release-4.10.2_RC1 release-4.10.1 release-4.10.1_RC2 release-4.10.1_RC1 release-4.10.0 release-4.10.0_RC1 release-4.10.0-vdn_RC1 release-4.9.0 release-4.9.0_RC4 release-4.9.0_RC3 release-4.9.0_RC2 release-4.9.0_RC1 release-4.9.0-vdn_RC2 release-4.9.0-vdn_RC1 release-4.8.0 release-4.8.0_RC8 release-4.8.0_RC7 release-4.8.0_RC6 release-4.8.0_RC5 release-4.8.0_RC4 release-4.8.0_RC3 release-4.8.0_RC2 release-4.8.0_RC1 release-4.8.0-vdn_RC1 release-4.7.0 release-4.7.0_RC3 release-4.7.0_RC2 release-4.7.0_RC1 release-4.7.0-vdn_RC1 release-4.6.0 release-4.6.0_RC4 release-4.6.0_RC3 release-4.6.0_RC2 release-4.6.0_RC1 release-4.5.0 release-4.5.0_RC6 release-4.5.0_RC5 release-4.5.0_RC4 release-4.5.0_RC3 release-4.5.0_RC2 release-4.5.0_RC1 release-4.4.1_RC2 release-4.4.1_RC1 release-4.4.1-vdn_RC2 release-4.4.1-vdn_RC1 release-4.4.0 release-4.4.0_RC10 release-4.4.0_RC9 release-4.4.0_RC8 release-4.4.0_RC7 release-4.4.0_RC6 release-4.4.0_RC5 release-4.4.0_RC4 release-4.4.0_RC3 release-4.4.0_RC2 release-4.4.0_RC1 release-4.3.0 release-4.3.0_RC7 release-4.3.0_RC6 release-4.3.0_RC5 release-4.3.0_RC4 release-4.3.0_RC3 
release-4.3.0_RC2 release-4.3.0_RC1 release-4.2.0 release-4.2.0_RC6 release-4.2.0_RC5 release-4.2.0_RC4 release-4.2.0_RC3 release-4.2.0_RC2 release-4.2.0_RC1 release-4.1.0 release-4.1.0_RC20 release-4.1.0_RC19 release-4.1.0_RC18 release-4.1.0_RC17 release-4.1.0_RC16 release-4.1.0_RC15 release-4.1.0_RC14 release-4.1.0_RC13 release-4.1.0_RC12 release-4.1.0_RC11 release-4.1.0_RC10 release-4.1.0_RC9 release-4.1.0_RC8 release-4.1.0_RC7 release-4.1.0_RC6 release-4.1.0_RC5 release-4.1.0_RC4 release-4.1.0_RC3 release-4.1.0_RC2 release-4.1.0_RC1 release-4.0.0 release-4.0.0_RC6 release-4.0.0_RC5 release-4.0.0_RC4 release-4.0.0_RC3 release-4.0.0_RC2 release-4.0.0_RC1 release-3.9.0 release-3.9.0_RC18 release-3.9.0_RC17 release-3.9.0_RC16 release-3.9.0_RC15 release-3.9.0_RC14 release-3.9.0_RC13 release-3.9.0_RC12 release-3.9.0_RC11 release-3.9.0_RC10 release-3.9.0_RC9 release-3.9.0_RC8 release-3.9.0_RC7 release-3.9.0_RC6 release-3.9.0_RC5 release-3.9.0_RC4 release-3.9.0_RC3 release-3.9.0_RC2 release-3.9.0_RC1 release-3.8.0 release-3.8.0_RC14 release-3.8.0_RC13 release-3.8.0_RC12 release-3.8.0_RC11 release-3.8.0_RC10 release-3.8.0_RC9 release-3.8.0_RC8 release-3.8.0_RC7 release-3.8.0_RC6 release-3.8.0_RC5 release-3.8.0_RC4 release-3.8.0_RC3 release-3.8.0_RC2 release-3.8.0_RC1 release-3.7.0_RC16 release-3.7.0_RC15 release-3.7.0_RC14 release-3.7.0_RC13 release-3.7.0_RC12 release-3.7.0_RC11 release-3.7.0_RC10 release-3.7.0_RC9 release-3.7.0_RC8 release-3.7.0_RC7 release-3.7.0_RC6 release-3.7.0_RC5 release-3.7.0_RC4 release-3.7.0_RC3 release-3.7.0_RC2 release-3.7.0_RC1 release-3.6.5_RC1 release-3.6.0_RC8 release-3.6.0_RC7 release-3.6.0_RC6 release-3.6.0_RC5 release-3.6.0_RC4 release-3.6.0_RC3 release-3.6.0_RC2 release-3.6.0_RC1 release-3.5.0 release-3.5.0_RC8 release-3.5.0_RC7 release-3.5.0_RC6 release-3.5.0_RC5 release-3.5.0_RC4 release-3.5.0_RC3 release-3.5.0_RC2 release-3.5.0_RC1 release-3.4.0 release-3.4.0_RC21 release-3.4.0_RC20 release-3.4.0_RC19 release-3.4.0_RC18 
release-3.4.0_RC17 release-3.4.0_RC16 release-3.4.0_RC15 release-3.4.0_RC14 release-3.4.0_RC13 release-3.4.0_RC12 release-3.4.0_RC11 release-3.4.0_RC10 release-3.4.0_RC9 release-3.4.0_RC8 release-3.4.0_RC7 release-3.4.0_RC6 release-3.4.0_RC5 release-3.4.0_RC4 release-3.4.0_RC3 release-3.4.0_RC2 release-3.4.0_RC1 release-3.3.0_RC24 release-3.3.0_RC23 release-3.3.0_RC22 release-3.3.0_RC21 release-3.3.0_RC20 release-3.3.0_RC19 release-3.3.0_RC18 release-3.3.0_RC17 release-3.3.0_RC16 release-3.3.0_RC15 release-3.3.0_RC14 release-3.3.0_RC13 release-3.3.0_RC12 release-3.3.0_RC11 release-3.3.0_RC10 release-3.3.0_RC9 release-3.3.0_RC8 release-3.3.0_RC7 release-3.3.0_RC6 release-3.3.0_RC5 release-3.3.0_RC4 release-3.3.0_RC3 release-3.3.0_RC2 release-3.3.0_RC1 release-3.2.7 release-3.2.7_RC14 release-3.2.7_RC13 release-3.2.7_RC12 release-3.2.7_RC11 release-3.2.7_RC10 release-3.2.7_RC9 release-3.2.7_RC8 release-3.2.7_RC7 release-3.2.7_RC6 release-3.2.7_RC5 release-3.2.7_RC4 release-3.2.7_RC3 release-3.2.7_RC2 release-3.2.7_RC1 release-3.2.0_RC21 release-3.2.0_RC20 release-3.2.0_RC19 release-3.2.0_RC18 release-3.2.0_RC17 release-3.2.0_RC16 release-3.2.0_RC15 release-3.2.0_RC14 release-3.2.0_RC13 release-3.2.0_RC12 release-3.2.0_RC11 release-3.2.0_RC10 release-3.2.0_RC9 release-3.2.0_RC8 release-3.2.0_RC7 release-3.2.0_RC6 release-3.2.0_RC5 release-3.2.0_RC4 release-3.2.0_RC3 release-3.2.0_RC2 release-3.2.0_RC1 release-3.1.0 release-3.1.0_RC10 release-3.1.0_RC9 release-3.1.0_RC8 release-3.1.0_RC7 release-3.1.0_RC6 release-3.1.0_RC5 release-3.1.0_RC4 release-3.1.0_RC3 release-3.1.0_RC2 release-3.1.0_RC1 release-3.0.1_RC7 release-3.0.1_RC6 release-3.0.1_RC5 release-3.0.1_RC4 release-3.0.1_RC3 release-3.0.1_RC2 release-3.0.1_RC1 release-3.0.0 release-3.0.0_RC42 release-3.0.0_RC41 release-3.0.0_RC40 release-3.0.0_RC39 release-3.0.0_RC38 release-3.0.0_RC37 release-3.0.0_RC36 release-3.0.0_RC35 release-3.0.0_RC34 release-3.0.0_RC33 release-3.0.0_RC32 release-3.0.0_RC31 
release-3.0.0_RC30 release-3.0.0_RC29 release-3.0.0_RC28 release-3.0.0_RC27 release-3.0.0_RC26 release-3.0.0_RC25 release-3.0.0_RC24 release-3.0.0_RC23 release-3.0.0_RC22 release-3.0.0_RC21 release-3.0.0_RC20 release-3.0.0_RC19 release-3.0.0_RC18 release-3.0.0_RC17 release-3.0.0_RC16 release-3.0.0_RC15 release-3.0.0_RC14 release-3.0.0_RC13 release-3.0.0_RC12 release-3.0.0_RC11 release-3.0.0_RC10 release-3.0.0_RC9 release-3.0.0_RC8 release-3.0.0_RC7 release-3.0.0_RC6 release-3.0.0_RC5 release-3.0.0_RC4 release-3.0.0_RC3 release-3.0.0_RC2 release-3.0.0_RC1 release-2.10.0 release-2.10.0_RC24 release-2.10.0_RC23 release-2.10.0_RC22 release-2.10.0_RC21 release-2.10.0_RC20 release-2.10.0_RC19 release-2.10.0_RC18 release-2.10.0_RC17 release-2.10.0_RC16 release-2.10.0_RC15 release-2.10.0_RC14 release-2.10.0_RC13 release-2.10.0_RC12 release-2.10.0_RC11 release-2.10.0_RC10 release-2.10.0_RC9 release-2.10.0_RC8 release-2.10.0_RC7 release-2.10.0_RC6 release-2.10.0_RC5 release-2.10.0_RC4 release-2.10.0_RC3 release-2.10.0_RC2 release-2.10.0_RC1 release-2.9.0_RC13 release-2.9.0_RC12 release-2.9.0_RC11 release-2.9.0_RC10 release-2.9.0_RC9 release-2.9.0_RC8 release-2.9.0_RC7 release-2.9.0_RC6 release-2.9.0_RC5 release-2.9.0_RC4 release-2.9.0_RC3 release-2.9.0_RC2 release-2.9.0_RC1 release-2.8.0 release-2.8.0_RC34 release-2.8.0_RC33 release-2.8.0_RC32 release-2.8.0_RC31 release-2.8.0_RC30 release-2.8.0_RC29 release-2.8.0_RC28 release-2.8.0_RC27 release-2.8.0_RC26 release-2.8.0_RC25 release-2.8.0_RC24 release-2.8.0_RC23 release-2.8.0_RC22 release-2.8.0_RC21 release-2.8.0_RC20 release-2.8.0_RC19 release-2.8.0_RC18 release-2.8.0_RC17 release-2.8.0_RC16 release-2.8.0_RC15 release-2.8.0_RC14 release-2.8.0_RC13 release-2.8.0_RC12 release-2.8.0_RC11 release-2.8.0_RC10 release-2.8.0_RC9 release-2.8.0_RC8 release-2.8.0_RC7 release-2.8.0_RC6 release-2.8.0_RC5 release-2.8.0_RC4 release-2.8.0_RC3 release-2.8.0_RC2 release-2.8.0_RC1 release-2.8.0-desktopapis-hotfix release-2.7.0 
release-2.7.0_RC16 release-2.7.0_RC15 release-2.7.0_RC14 release-2.7.0_RC13 release-2.7.0_RC12 release-2.7.0_RC11 release-2.7.0_RC10 release-2.7.0_RC9 release-2.7.0_RC8 release-2.7.0_RC7 release-2.7.0_RC6 release-2.7.0_RC5 release-2.7.0_RC4 release-2.7.0_RC3 release-2.7.0_RC2 release-2.7.0_RC1 release-2.6.5_RC3 release-2.6.5_RC2 release-2.6.5_RC1 release-2.6.5_Kong10_RC2 release-2.6.0 release-2.6.0_kong9 release-2.6.0_kong_RC1 release-2.6.0_RC25 release-2.6.0_RC24 release-2.6.0_RC23 release-2.6.0_RC22 release-2.6.0_RC21 release-2.6.0_RC20 release-2.6.0_RC19 release-2.6.0_RC18 release-2.6.0_RC17 release-2.6.0_RC16 release-2.6.0_RC15 release-2.6.0_RC14 release-2.6.0_RC13 release-2.6.0_RC12 release-2.6.0_RC11 release-2.6.0_RC10 release-2.6.0_RC9 release-2.6.0_RC8 release-2.6.0_RC7 release-2.6.0_RC6 release-2.6.0_RC5 release-2.6.0_RC4 release-2.6.0_RC3 release-2.6.0_RC2 release-2.6.0_RC1 release-2.6.0-k8s_9467d13 release-2.5.0 release-2.5.0_RC12 release-2.5.0_RC11 release-2.5.0_RC10 release-2.5.0_RC9 release-2.5.0_RC9_portalfix release-2.5.0_RC8 release-2.5.0_RC7 release-2.5.0_RC6 release-2.5.0_RC5 release-2.5.0_RC4 release-2.5.0_RC4_kong release-2.5.0_RC3 release-2.5.0_RC2 release-2.5.0_RC1 release-2.4.0 release-2.4.0_RC9 release-2.4.0_RC9_portal_RC2 release-2.4.0_RC9_portal_RC1 release-2.4.0_RC8 release-2.4.0_RC7 release-2.4.0_RC6 release-2.4.0_RC5 release-2.4.0_RC4 release-2.4.0_RC3 release-2.4.0_RC2 release-2.4.0_RC2_kong_9 release-2.4.0_RC1 release-2.3.5_RC5 portal-fix player-var original-tag-2.4.0 master_e26ae85 loadtest-release-2.10.0_nginx_prometheus kubernetes keycloak-3.2.0 keycloak-3.2.0_RC1 keycloak7_RC1 jenkins-config fix-tag-2.4.0 es-mapping dock-0.0.3_RC7 dock-0.0.3_RC6 dock-0.0.3_RC5 dock-0.0.3_RC4 dock-0.0.3_RC3 dock-0.0.3_RC2 dock-0.0.3_RC1 dock-0.0.3-before-delete dock-0.0.2_RC13 dock-0.0.2_RC12 dock-0.0.2_RC11 dock-0.0.2_RC10 dock-0.0.2_RC9 dock-0.0.2_RC8 dock-0.0.2_RC7 dock-0.0.2_RC6 dock-0.0.2_RC5 dock-0.0.2_RC4 dock-0.0.2_RC3 dock-0.0.2_RC2 
dock-0.0.2_RC1 backup SMYALTAMASH-patch-1
No related merge requests found
Showing with 206 additions and 235 deletions
+206 -235
---
---
# Play: run on every host in the processing-cluster-kafka group and assign
# each broker a stable numeric id before applying the setup-kafka role.
- hosts: processing-cluster-kafka
  become: yes
  pre_tasks:
    # kafka_id is the 1-based position of this host in the play's host list.
    # Later tasks use it to run cluster-wide admin commands on broker 1 only.
    - name: Registering kafka_id
      set_fact:
        kafka_id: "{% for servername in play_hosts %}{% if inventory_hostname==servername %}{{ loop.index }}{% endif %}{% endfor %}"
    - name: Print kafka_id
      debug:
        var: kafka_id
  roles:
    - setup-kafka
...@@ -658,3 +658,13 @@ kafka_urls: "{{groups['processing-cluster-kafka']|join(':9092,')}}:9092" ...@@ -658,3 +658,13 @@ kafka_urls: "{{groups['processing-cluster-kafka']|join(':9092,')}}:9092"
kafka_topics_certificate_instruction: "{{env_name}}.coursebatch.certificate.request" kafka_topics_certificate_instruction: "{{env_name}}.coursebatch.certificate.request"
cert_service_container_name: "{{env}}-e-credentials" cert_service_container_name: "{{env}}-e-credentials"
cert_service_cloud_storage_type: "{{cert_service_cloud_storage_type}}" cert_service_cloud_storage_type: "{{cert_service_cloud_storage_type}}"
### Release-2.4.0 ###
# Path (relative to the playbook tree) where samza job tarballs are staged.
samza_tar_files_localpath: roles/samza-jobs/defaults
# Map of samza job id -> tarball file-name prefix; used to locate each
# job's artifact when deploying or killing selected jobs.
job_names:
  lms.user-account-merger_1:
    job_file_name: 'user-account-merge'
  lms.sso-account-updater_1:
    job_file_name: 'sso-account-updater'
  lms.indexer_1:
    job_file_name: 'indexer'
...@@ -6,5 +6,11 @@ hadoop_version: 2.7.2 ...@@ -6,5 +6,11 @@ hadoop_version: 2.7.2
__yarn_port__: 8000 __yarn_port__: 8000
cassandra_port: 9042 cassandra_port: 9042
es_port: 9200 es_port: 9200
samza_tar_files_localpath: roles/samza-jobs/defaults
#telemetry_extractor_container_memory_mb: 1024 job_names:
lms.user-account-merger_1:
job_file_name: 'user-account-merge'
lms.sso-account-updater_1:
job_file_name: 'sso-account-updater'
lms.indexer_1:
job_file_name: 'indexer'
# Print the value of the job.name property from the properties file given
# as $1: select the job.name line, turn '=' into tabs, take field 2.
sed -n "/job\.name.*$/ p" $1 | sed -n "s/=/\\t/g p" | cut -f 2
\ No newline at end of file
#!/usr/bin/env bash
# Scan every *.properties file under the current directory and write one
# status line per samza job to the file given as $1, in the form:
#   <folder_name>:<job_name>:---:stopped
# ("---" is a placeholder for the YARN application id, filled in later.)
find . -name "*.properties" | while read fname; do
    # job.name=<value> -> take the value field
    job_name=$(sed -n "/^job\.name.*$/ p" "$fname" | sed -n "s/=/\\t/g p" | cut -f 2)
    # The job folder is two levels above its config/*.properties file.
    folder_path=$(dirname "$(dirname "$fname")")
    folder_name=$(basename "$folder_path")
    echo "$folder_name:$job_name:---:stopped"
done > "$1"
#!/usr/bin/env bash
# List the names of all running YARN applications: second tab-separated
# column of `yarn application -list`, dropping everything up to and
# including the Application-Name header line.
./yarn application -list | cut -f 2 | sed 1,'/Application-Name/'d
\ No newline at end of file
#!/usr/bin/env bash
# Sync the job status file ($1) with the YARN application list: for every
# application currently running, flip its status from "stopped" to
# "started" and replace the "---" placeholder with its application id.
# Names are read with the trailing "_1" instance suffix stripped.
job_names=($(./yarn application -list | cut -f 2 | sed 1,'/Application-Name/'d | sed 's/_1$//'))
job_ids=($(./yarn application -list | cut -f 1 | sed 1,'/Application-Id/'d))
count=${#job_names[@]}
for (( i=0; i<count; i++ ));
do
    job_name=${job_names[i]}
    job_id=${job_ids[i]}
    # Previously these sed calls were wrapped in backticks, which executed
    # sed's (empty) output as a command; invoke sed directly instead.
    sed -i "/$job_name/s/stopped/started/g" "$1"
    sed -i "/$job_name/s/---/$job_id/g" "$1"
done
#!/usr/bin/env bash
# Kill every YARN application whose listing matches the pattern given as $1.
# Use mktemp instead of fixed file names in the cwd so concurrent runs
# cannot clobber each other's scratch files.
applist=$(mktemp)
ids=$(mktemp)
./yarn application -list > "$applist"
# Application id is the first tab-separated column of matching lines.
sed -n "/$1.*$/ p" "$applist" | cut -f 1 > "$ids"
while read in;
do
    ./yarn application -kill "$in";
done < "$ids"
rm "$ids"
rm "$applist"
\ No newline at end of file
#!/usr/bin/env bash
# Read the job status file ($1) — lines of the form
# folder:job_name:application_id:status — and kill every YARN application
# whose status field is "restart".
while read LINE
do
    application_id=$(echo "$LINE" | awk -F':' '{print $3}')
    status=$(echo "$LINE" | awk -F':' '{print $4}')
    if [ "$status" == "restart" ]
    then
        ./yarn application -kill "$application_id"
    fi
done < "$1"
\ No newline at end of file
#!/usr/bin/env bash
# Remove extracted job directories (and their tarballs under $2) that are
# no longer listed in the job status file.
# $1 - job status file (folder:job:appid:status per line)
# $2 - samza jobs base dir (tarballs in $2/, extracted jobs in $2/extract/)
cat $1 | awk -F':' '{print $1}' > tmp.txt
# NOTE(review): parses `ls -l` output to find directories — fragile with
# unusual file names; `find "$2/extract" -maxdepth 1 -type d` would be safer.
DIRS=`ls -l $2/extract/ | egrep '^d'| awk '{print $9}'`
for dir in $DIRS
do
# Keep only directories still referenced (exact whole-line match) in $1.
if ! grep -Fxq $dir tmp.txt
then
# NOTE(review): $dir is a bare name, so this rm is relative to the current
# working directory — presumably $2/extract; confirm callers chdir there.
rm -rf $dir
rm $2/$dir
fi
done
rm tmp.txt
\ No newline at end of file
#!/usr/bin/env bash
# Start every samza job whose status in the status file is "stopped" or
# "restart", using each job's own run-job.sh launcher.
# $1 - job status file (folder:job:appid:status per line)
# $2 - base folder containing the extracted job directories
folder_path=$2
cat $1 | while read LINE
do
dir_name=`echo $LINE | awk -F':' '{print $1}'`;
job_name=`echo $LINE | awk -F':' '{print $2}'`;
application_id=`echo $LINE | awk -F':' '{print $3}'`;
status=`echo $LINE | awk -F':' '{print $4}'`;
# Deliberately left unquoted below: expansion must glob *.properties.
properties_path="$folder_path/$dir_name/config/*.properties"
config_file_path=`ls -d $properties_path`
if [ "$status" == "stopped" ] || [ "$status" == "restart" ]
then
# NOTE(review): relative ./$dir_name assumes cwd is the extract folder.
./$dir_name/bin/run-job.sh --config-factory=org.apache.samza.config.factories.PropertiesConfigFactory --config-path=file:///$config_file_path
fi
done
\ No newline at end of file
#!/usr/bin/env bash
# Merge jobs found under directory $2 into the status file $1:
#  - a job already tracked gets its folder column repointed at the freshly
#    extracted folder and its status marked "restart";
#  - an unknown job is appended as "<folder>:<job>:---:stopped".
find $2 -name "*.properties" | while read fname; do
    # job.name=<value> -> take the value field
    job_name=$(sed -n "/^job\.name.*$/ p" "$fname" | sed -n "s/=/\\t/g p" | cut -f 2)
    folder_path=$(dirname "$(dirname "$fname")")
    folder_name=$(basename "$folder_path")
    if grep -Fwq "$job_name" "$1"
    then
        # Previously these sed calls were wrapped in backticks, which
        # executed sed's (empty) output as a command; call sed directly.
        sed -i "/$job_name/s/^.*\.gz/$folder_name/" "$1"
        sed -i "/$job_name/s/started/restart/" "$1"
    else
        echo "adding"
        echo "$folder_name:$job_name:---:stopped" >> "$1"
    fi
done
\ No newline at end of file
---
# samza-jobs role: push helper scripts, rebuild the job status file, copy
# and extract new job tarballs, rewrite their configs for this environment,
# then kill/start jobs according to the status file.
- name: Create Directory for Jobs
  file: path={{item}} owner=hduser group=hadoop recurse=yes state=directory
  with_items:
    - "{{samza_jobs_dir}}"
    - "{{samza_jobs_dir}}/extract"

- name: Copy script to get all running jobs
  copy: src=get_all_running_app_name.sh dest=/usr/local/hadoop/bin owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"

- name: Copy script to get all job names
  copy: src=get_all_job_name.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"

- name: Copy script to get updated job names from extracted tar
  copy: src=update_new_job_name.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"

- name: Copy script to start jobs based on the status
  copy: src=start_jobs.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"

- name: Copy script to remove old job tar
  copy: src=remove_old_tar.sh dest="{{samza_jobs_dir}}/extract" owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"

- name: Copy script to kill jobs based on the status
  copy: src=kill_jobs.sh dest=/usr/local/hadoop/bin owner=hduser group=hadoop mode="u=rwx,g=rx,o=r"

- name: Remove file of job status
  file: path="{{job_status_file}}" state=absent

- name: Get job names from folder
  command: bash -lc "./get_all_job_name.sh {{job_status_file}}"
  args:
    chdir: "{{samza_jobs_dir}}/extract"

- name: Ensure yarn resource manager is running
  command: bash -lc "(ps aux | grep yarn-hduser-resourcemanager | grep -v grep) || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{hadoop_version}}/conf/ start resourcemanager"
  become: yes
  become_user: hduser

- name: Update status of running job in file
  command: bash -lc "./get_all_running_app_name.sh {{job_status_file}}"
  args:
    chdir: /usr/local/hadoop/bin

# force=no: tarballs already present are not re-copied, so only genuinely
# new uploads register as changed results.
- name: copy new jobs tar ball
  copy: src={{ item }} dest={{samza_jobs_dir}}/ force=no owner=hduser group=hadoop
  with_fileglob:
    - ./jobs/*
  register: new_jobs

# "item is changed" replaces the deprecated bare-jinja "{{item|changed}}"
# form and the removed "|changed" filter.
- name: Create Directory to extract new jobs
  file: path={{samza_jobs_dir}}/extract/{{item.item | basename }} owner=hduser group=hadoop recurse=yes state=directory
  register: extract_dir
  when: item is changed
  with_items: "{{ (new_jobs|default({})).results|default([]) }}"

- name: extract new jobs
  command: tar -xvf "{{samza_jobs_dir}}/{{item.item | basename}}" -C "{{samza_jobs_dir}}/extract/{{item.item | basename }}"
  when: item is changed
  with_items: "{{ (new_jobs|default({})).results|default([]) }}"

- name: Create Directory to extract new jobs
  file: path={{samza_jobs_dir}}/extract/ owner=hduser group=hadoop recurse=yes

- name: Get all new job configs
  shell: "ls -d -1 {{item.path}}/config/*.properties"
  register: config_files
  when: item is changed
  with_items: "{{ (extract_dir|default({})).results|default([]) }}"

# Substitute environment-specific placeholders in every new job's config.
- name: update environment specific details in new job configs
  replace: dest="{{item[1].stdout}}" regexp="{{item[0].key}}" replace="{{item[0].value}}"
  when: item[1] is changed
  with_nested:
    - [{key: "__yarn_host__", value: "{{__yarn_host__}}"}, {key: "__yarn_port__", value: "{{__yarn_port__}}"}, {key: "__env__", value: "{{env_name}}" }, {key: "__zookeepers__", value: "{{zookeepers}}"}, {key: "__kafka_brokers__", value: "{{kafka_brokers}}"}, {key: "__lms_host__", value: "{{__lms_host__}}"}, {key: "__lms_es_port__", value: "{{sunbird_es_port}}"}, {key: "__lms_es_host__", value: "{{sunbird_es_host}}"}]
    - "{{ (config_files|default({})).results|default([]) }}"

- name: Update status of new jobs in file
  command: bash -lc "./update_new_job_name.sh {{job_status_file}} {{samza_jobs_dir}}/extract/{{item.item | basename}}"
  args:
    chdir: "{{samza_jobs_dir}}/extract/"
  when: item is changed
  with_items: "{{ (new_jobs|default({})).results|default([]) }}"

- name: Kill jobs
  command: bash -lc "./kill_jobs.sh {{job_status_file}}"
  args:
    chdir: /usr/local/hadoop/bin

- name: Start jobs
  command: bash -lc "./start_jobs.sh {{job_status_file}} {{samza_jobs_dir}}/extract"
  args:
    chdir: "{{samza_jobs_dir}}/extract/"
  become_user: hduser

- name: Remove all old tar
  command: bash -lc "./remove_old_tar.sh {{job_status_file}} {{samza_jobs_dir}}"
  args:
    chdir: "{{samza_jobs_dir}}/extract/"

# Re-assert ownership over everything the scripts created.
- file: path={{samza_jobs_dir}} owner=hduser group=hadoop state=directory recurse=yes
--- - name: Create Directory for Jobs
- include: deploy.yml file: path={{ item }} owner=hduser group=hadoop recurse=yes state=directory
when: deploy_jobs | default(false) with_items:
- "{{ samza_jobs_dir }}"
- "{{ samza_jobs_dir }}/extract"
- include: stop_jobs.yml - name: Get the application id to kill the app
when: stop_jobs | default(false) shell: "{{ yarn_path }}/yarn application --list | grep -i {{ item }} | awk '{print $1}'"
with_items: "{{ job_names_to_kill.split(',')|list }}"
register: appid
- include: start_jobs.yml - name: Kill the mentioned applications
when: start_jobs | default(false) shell: "{{ yarn_path }}/yarn application -kill {{ item.stdout }}"
with_items:
- "{{ appid['results'] }}"
when: item.stdout | length > 0
# Locate existing artifacts for each job slated for redeploy.
# job_names_to_kill is a comma-separated list of job ids; job_names maps
# each id to its tarball file-name prefix.
- name: find the existing file names to remove
  find:
    paths: "{{ samza_jobs_dir }}"
    patterns: "{{ job_names['%s'|format(item)].job_file_name }}*"
    recurse: yes
  with_items: "{{ job_names_to_kill.split(',') }}"
  register: existing_files

# NOTE(review): json_query requires the jmespath library on the controller.
- name: remove the files under "{{ samza_jobs_dir }}" directory
  command: rm -rf "{{ item.path | basename }}"
  with_items: "{{ existing_files | json_query('results[].files[]') }}"
  args:
    chdir: "{{ samza_jobs_dir }}"

- name: remove the files under "{{ samza_jobs_dir }}/extract" directory
  command: rm -rf "{{ item.path | basename }}"
  with_items: "{{ existing_files | json_query('results[].files[]') }}"
  args:
    chdir: "{{ samza_jobs_dir }}/extract"

# force=no: already-present tarballs are skipped, so only new uploads
# appear in the registered results.
- name: copy new jobs tar ball
  copy: src={{ item }} dest={{ samza_jobs_dir }}/ force=no owner=hduser group=hadoop
  with_fileglob:
    - ../defaults/jobs/*
  register: new_jobs

- name: Create directory to extract new jobs
  file: path="{{ samza_jobs_dir }}/extract/{{ item }}" owner=hduser group=hadoop recurse=yes state=directory
  with_items:
    - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"

- name: extract new jobs
  unarchive:
    src: "{{ samza_jobs_dir }}/{{ item }}"
    dest: "{{ samza_jobs_dir }}/extract/{{ item }}"
    remote_src: yes
  with_items:
    - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"

- name: Get all new jobs config
  shell: "ls -d -1 {{ samza_jobs_dir }}/extract/{{ item }}/config/*.properties"
  register: config_files
  with_items:
    - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"

# Substitute environment-specific placeholders in every new job's config.
- name: update environment specific details in new job configs
  replace: dest="{{ item[1].stdout }}" regexp="{{ item[0].key }}" replace="{{ item[0].value }}"
  with_nested:
    - [{key: "__yarn_host__", value: "{{__yarn_host__}}"}, {key: "__yarn_port__", value: "{{__yarn_port__}}"}, {key: "__env__", value: "{{env_name}}" }, {key: "__zookeepers__", value: "{{zookeepers}}"}, {key: "__kafka_brokers__", value: "{{kafka_brokers}}"}, {key: "__lms_host__", value: "{{__lms_host__}}"}, {key: "__lms_es_port__", value: "{{sunbird_es_port}}"}, {key: "__lms_es_host__", value: "{{sunbird_es_host}}"}]
    - "{{ config_files | json_query('results[]') }}"

# Launch each extracted job with its own run-job.sh and generated config.
- name: Start the jobs
  shell: "{{ samza_jobs_dir }}/extract/{{ item.0 }}/bin/run-job.sh --config-factory=org.apache.samza.config.factories.PropertiesConfigFactory --config-path={{ item.1.stdout }}"
  with_together:
    - "{{ new_jobs | json_query('results[].invocation.module_args.original_basename') }}"
    - "{{ config_files | json_query('results[]') }}"
---
# start_jobs.yml: rebuild the job status file and start every job the
# helper scripts report as stopped/restart.
- name: Remove file of job status
  file: path="{{job_status_file}}" state=absent
  become: yes
- name: Get job names from folder
  command: bash -lc "./get_all_job_name.sh {{job_status_file}}"
  args:
    chdir: "{{samza_jobs_dir}}/extract"
  become: yes
# Start the resource manager only if it is not already running.
- name: Ensure yarn resource manager is running
  command: bash -lc "(ps aux | grep yarn-hduser-resourcemanager | grep -v grep) || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{hadoop_version}}/conf/ start resourcemanager"
  become: yes
  become_user: hduser
- name: Start jobs
  command: bash -lc "./start_jobs.sh {{job_status_file}} {{samza_jobs_dir}}/extract"
  args:
    chdir: "{{samza_jobs_dir}}/extract/"
  become: yes
---
# stop_jobs.yml: rebuild the job status file, then kill the jobs that
# kill_jobs.sh selects from it.
- name: Remove file of job status
  file: path="{{job_status_file}}" state=absent
  become: yes
- name: Get job names from folder
  command: bash -lc "./get_all_job_name.sh {{job_status_file}}"
  args:
    chdir: "{{samza_jobs_dir}}/extract"
  become: yes
- name: Kill jobs
  command: bash -lc "./kill_jobs.sh {{job_status_file}}"
  args:
    chdir: /usr/local/hadoop/bin
  become: yes
env: dev
# Topics whose retention is overridden after creation.
# retention_time is in milliseconds (172800000 ms = 48 h).
processing_kafka_overriden_topics:
  - name: lms.audit.events
    retention_time: 172800000
    replication_factor: 1
  - name: lms.sso.events
    retention_time: 172800000
    replication_factor: 1
  - name: lms.user.account.merge
    retention_time: 172800000
    replication_factor: 1
# Topics created on the processing kafka cluster ({{env_name}}. prefix is
# added at creation time).
processing_kafka_topics:
  - name: lms.audit.events
    num_of_partitions: 1
    replication_factor: 1
  - name: lms.sso.events
    num_of_partitions: 4
    replication_factor: 1
  - name: lms.user.account.merge
    num_of_partitions: 1
    replication_factor: 1
# Run topic administration on the first broker only (kafka_id == "1") so
# each command executes once per cluster, not once per broker.
- name: create topics
  command: /opt/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --create --topic {{env_name}}.{{item.name}} --partitions {{ item.num_of_partitions }} --replication-factor {{ item.replication_factor }}
  with_items: "{{processing_kafka_topics}}"
  # ignore_errors: creation fails harmlessly when the topic already exists
  ignore_errors: true
  when: kafka_id=="1"
  tags:
    - processing-kafka
- name: override retention time
  command: /opt/kafka/bin/kafka-topics.sh --zookeeper localhost:2181 --alter --topic {{env_name}}.{{item.name}} --config retention.ms={{ item.retention_time }}
  with_items: "{{processing_kafka_overriden_topics}}"
  when: kafka_id=="1" and item.retention_time is defined
  tags:
    - processing-kafka
--- ---
- name: "Start Nodemanager on Slaves" - name: Move the selected samza app tar files to another dir
hosts: localhost
tasks:
- name: find the selected samza app tar files path
find:
paths: "{{job_workspace}}/{{ samza_tar_files_localpath }}/allfiles"
patterns: "{{ job_names['%s'|format(item)].job_file_name }}*"
recurse: yes
with_items: "{{ job_names_to_kill.split(',') }}"
register: existing_files
- name: Copy the selected samza app tar files to jobs folder
copy:
src: "{{ item }}"
dest: "{{job_workspace}}/{{ samza_tar_files_localpath }}/jobs"
with_items:
- "{{ existing_files | json_query('results[].files[].path') }}"
- name: "Start Nodemanager on Slaves if stopped"
hosts: "yarn-slave" hosts: "yarn-slave"
vars: vars:
hadoop_version: 2.7.2 hadoop_version: 2.7.2
become: yes become: yes
tasks: pre_tasks:
- name: Ensure yarn nodemanager is running - name: Ensure yarn nodemanager is running
become_user: hduser become_user: hduser
shell: | shell: |
(ps aux | grep yarn-hduser-nodemanager | grep -v grep) || /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{hadoop_version}}/conf/ start nodemanager || sleep 10 (ps aux | grep yarn-hduser-nodemanager | grep -v grep) \
|| /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{ hadoop_version }}/conf/ start nodemanager \
|| sleep 10
- name: install imagemagick - name: install imagemagick
apt: name=imagemagick state=present update_cache=yes apt: name=imagemagick state=present update_cache=yes
...@@ -16,9 +36,13 @@ ...@@ -16,9 +36,13 @@
- name: "Deploy Samza jobs" - name: "Deploy Samza jobs"
hosts: "yarn-master" hosts: "yarn-master"
become: yes become: yes
become_user: hduser
vars_files: vars_files:
- "{{inventory_dir}}/secrets.yml" - "{{ inventory_dir }}/secrets.yml"
vars: pre_tasks:
deploy_jobs: true - name: Ensure yarn resource manager is running
shell: |
(ps aux | grep yarn-hduser-resourcemanager | grep -v grep) \
|| /usr/local/hadoop/sbin/yarn-daemon.sh --config /usr/local/hadoop-{{ hadoop_version }}/conf/ start resourcemanager
roles: roles:
- samza-jobs - samza-jobs
@Library('deploy-conf') _
// Jenkins pipeline: check out the repo, derive env/module/job from the
// Jenkins job path, then run the core_kafka_setup ansible playbook via the
// shared-library step ansible_playbook_run.
node() {
    try {
        String ANSI_GREEN = "\u001B[32m"
        String ANSI_NORMAL = "\u001B[0m"
        String ANSI_BOLD = "\u001B[1m"
        String ANSI_RED = "\u001B[31m"
        String ANSI_YELLOW = "\u001B[33m"
        stage('checkout public repo') {
            cleanWs()
            checkout scm
        }
        ansiColor('xterm') {
            stage('deploy'){
                values = [:]
                // NOTE(review): assumes JOB_NAME has the shape
                // <env>/<module>/<job> — verify folder layout before reuse.
                envDir = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-3].trim()
                module = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-2].trim()
                jobName = sh(returnStdout: true, script: "echo $JOB_NAME").split('/')[-1].trim()
                currentWs = sh(returnStdout: true, script: 'pwd').trim()
                ansiblePlaybook = "${currentWs}/ansible/core_kafka_setup.yml"
                ansibleExtraArgs = "--vault-password-file /var/lib/jenkins/secrets/vault-pass"
                values.put('currentWs', currentWs)
                values.put('env', envDir)
                values.put('module', module)
                values.put('jobName', jobName)
                values.put('ansiblePlaybook', ansiblePlaybook)
                values.put('ansibleExtraArgs', ansibleExtraArgs)
                println values
                ansible_playbook_run(values)
            }
        }
    }
    catch (err) {
        // Rethrow so the build is marked failed; no cleanup/notification yet.
        throw err
    }
}
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment