From f86dc086bf808901009ffcb5885f9c51f0d9a5c5 Mon Sep 17 00:00:00 2001
From: Sayali Gaikawad <61760125+gaiksaya@users.noreply.github.com>
Date: Mon, 27 Jun 2022 16:58:36 -0700
Subject: [PATCH] Add legacy-manifests folder

Change ref to 2.1 for opensearch dashboards (#2267) Signed-off-by: Sayali Gaikawad
test versionmatrixtest.yaml Signed-off-by: pgodithi
versionincrement.yaml matrix test Signed-off-by: pgodithi
Add test manifest to check build job (#2273) Signed-off-by: Sayali Gaikawad
Set concurrent build limit for gradle check (#2274) Signed-off-by: Peter Zhu
Add test coverage xml and support 1.x gradle check (#2279) Signed-off-by: Peter Zhu
* Add test coverage xml and support 1.x gradle check
* Remove comments
Add java args to 2.2.0 manifest (#2281) Signed-off-by: Sayali Gaikawad
Update downloadFromS3 lib env vars with credentials (#2278) Signed-off-by: Sayali Gaikawad
Update buildUploadManifestSHA lib env vars with credentials (#2277) Signed-off-by: Sayali Gaikawad
Update signArtifacts lib env vars with credentials (#2275) Signed-off-by: Sayali Gaikawad
[OSD][2.1] add functional test repo (#2282) Signed-off-by: Kawika Avilla
Change codecov xml name and move gradle check script to build repo (#2283) Signed-off-by: Peter Zhu
* Change codecov xml name and move gradle check script to build repo
* Add tests
* Tweak tokens
* Tweak jenkins codeCoverage.xml
* Add codeCoverage.xml
Add gradle cleanup before the checks (#2285) Signed-off-by: Peter Zhu
Update promoteArtifactsLib lib env vars with credentials (#2284) Signed-off-by: Sayali Gaikawad
* Resolve merge conflicts
Update remaining env vars with credentials in jenkins libraries (#2286) Signed-off-by: Sayali Gaikawad
* Update getManifestSHA lib env vars with credentials
Remove 2.1.0 from cron for building release candidates (#2287) Signed-off-by: Sayali Gaikawad
Version increment automation Signed-off-by: pgodithi
test version increment automation Signed-off-by: pgodithi
Staging version increment automation Signed-off-by: pgodithi
Add signer to support signing windows artifacts (#2156) Signed-off-by: Zelin Hao
* Add signer for windows distribution and implement the jenkins library to be capable of signing for windows.
* Replace platform with MagicMock for testing
* Change to not initiate signer for mock case
* Change the default signature type to .asc for compatibility with old usage
* Add signer abstract class
* Remove abstract method
* Commit test cases
* Fix python tests
* Remove unused library import
* Fix mock repo tests
* Remove commented block
* Fix the python tests and combine credentials
* Change path for windows tests
Update sql plugin name to opensearch-sql-plugin (#2295) Signed-off-by: Joshua Li
Update the tag creation workflow with bundle manifest groovy library (#2296) Signed-off-by: Zelin Hao
Switch runner of gradle check to c524xlarge for more stable runs (#2298) Signed-off-by: Peter Zhu
Add jdk14 for gradle check 1.x version (#2300) Signed-off-by: Peter Zhu
Create a docker promotion jenkins workflow (#2288) Signed-off-by: Zelin Hao
* Add docker promotion jenkins jobs
* Create a Jenkins workflow for docker promotion job
* Fix promoting product with its individual image tag
* Add data prepper into tests
* Convert to use docker copy job
* Update test cases
Add java home args in 1.3.4 manifest (#2299) Signed-off-by: Zelin Hao
Add 2.1.0 release notes (#2302) Signed-off-by: Sayali Gaikawad
CVE's Fix for Jenkins #2280 #2272 #2239 #1962 (#2303) Signed-off-by: pgodithi
* Version increment automation
* vulnerabilities fix
* CVE fix
* CVE's: CVE-2021-30129 sshd-core-2.5.1.jar fix
Co-authored-by: Sayali Gaikawad <61760125+gaiksaya@users.noreply.github.com>
Modify 2.1.0 release notes for OpenSearch component (#2313) Signed-off-by: Sayali Gaikawad
Update Gradle Wrapper to 7.4.2 (#2289) Signed-off-by: Daniel Widdis
* Update Gradle Wrapper to 7.4.2
* Update Jenkins scan to new Gradle version
Version increment automation: added ci tests Signed-off-by: pgodithi
Add handling of push events on gradle check (#2315) Signed-off-by: Peter Zhu
Change refs to tags and remove cron for 2.0.2 builds (#2316) Signed-off-by: Sayali Gaikawad
Add OpenSearch release notes link (#2318) Signed-off-by: Sayali Gaikawad
Add components to 1.3.4 manifest (#2319) Signed-off-by: Zelin Hao
* Add components to 1.3.4 manifest
* Remove atomicwrites from pipfile
* Adding back the atomicwrites
Move data prepper jenkins env vars to credentials (#2320) Signed-off-by: Sayali Gaikawad
Update the JDK version for manifest checks on 3.0.0 (#2301) Signed-off-by: Zelin Hao
* Exclude manifest checks for 3.0.0 for now
* Add conditional checks for jdk14 and jdk17
* Change jdk version
Update release issue template (#2317) Signed-off-by: Sayali Gaikawad
Rename webhooks for notifications (#2322) Signed-off-by: Sayali Gaikawad
Add check for test manifest and update error message (#2321) Signed-off-by: Sayali Gaikawad
Fixing junit task (#2328) Signed-off-by: Andriy Redko
Version increment automation: matrix check Signed-off-by: pgodithi
Version increment automation: yamllint Signed-off-by: pgodithi
Version increment automation: matrix test Signed-off-by: pgodithi
Version increment automation: test path matrix Signed-off-by: pgodithi
Version increment automation: yamllint fix Signed-off-by: pgodithi
Move artifact bucket name from env to creds (#2327) Signed-off-by: Sayali Gaikawad
Add some of the remaining components (#2329) Signed-off-by: Zelin Hao
Add alerting and ml (#2330) Signed-off-by: Zelin Hao
Updated manifests. (#2323) Signed-off-by: opensearch-ci-bot Co-authored-by: opensearch-ci-bot
Add remaining components (#2331) Signed-off-by: Zelin Hao
Add legacy-manifests folder Signed-off-by: pgodithi
---
 .github/ISSUE_TEMPLATE/release_template.md | 5 +-
 .github/workflows/manifests.yml | 46 ++-
 Pipfile.lock | 300 +++++++++---------
 build.gradle | 10 +-
 gradle/wrapper/gradle-wrapper.jar | Bin 59203 -> 59821 bytes
 gradle/wrapper/gradle-wrapper.properties | 15 +-
 gradlew | 259 +++++++++------
 gradlew.bat | 2 +-
 jenkins/check-for-build.jenkinsfile | 7 +-
 .../perf-test.jenkinsfile | 4 +-
 ...ase-data-prepper-all-artifacts.jenkinsfile | 7 +
 jenkins/gradle/gradle-check.jenkinsfile | 42 ++-
 .../bwc-test.jenkinsfile | 8 +-
 .../distribution-build.jenkinsfile | 4 +-
 .../integ-test.jenkinsfile | 8 +-
 .../maven-sign-release.jenkinsfile | 1 +
 jenkins/opensearch-ruby/Jenkinsfile | 3 +
 jenkins/opensearch/bwc-test.jenkinsfile | 8 +-
 .../opensearch/distribution-build.jenkinsfile | 4 +-
 jenkins/opensearch/integ-test.jenkinsfile | 8 +-
 jenkins/opensearch/perf-test.jenkinsfile | 4 +-
 .../promotion/promote-docker-ecr.jenkinsfile | 86 +++++
 .../sign-standalone-artifacts.jenkinsfile | 16 +-
 .../whitesource-scan.jenkinsfile | 6 +-
 .../1.0.0/opensearch-1.0.0-maven.yml | 0
 .../1.0.0/opensearch-1.0.0-test.yml | 0
 .../1.0.0/opensearch-1.0.0.yml | 0
 .../1.0.1/opensearch-1.0.1-test.yml | 0
 .../1.0.1/opensearch-1.0.1.yml | 0
 .../1.0.1/opensearch-dashboards-1.0.1.yml | 0
 .../1.1.0/opensearch-1.1.0-test.yml | 0
 .../1.1.0/opensearch-1.1.0.yml | 0
 .../opensearch-dashboards-1.1.0-test.yml | 0
 .../1.1.0/opensearch-dashboards-1.1.0.yml | 0
 .../1.1.1/opensearch-1.1.1-test.yml | 0
 .../1.1.1/opensearch-1.1.1.yml | 0
 .../opensearch-dashboards-1.1.1-test.yml | 0
 .../1.1.1/opensearch-dashboards-1.1.1.yml | 0
 .../1.2.0/opensearch-1.2.0-test.yml | 0
 .../1.2.0/opensearch-1.2.0.yml | 0
 .../opensearch-dashboards-1.2.0-test.yml | 0
 .../1.2.0/opensearch-dashboards-1.2.0.yml | 0
 .../1.2.1/opensearch-1.2.1-test.yml | 0
 .../1.2.1/opensearch-1.2.1.yml | 0
 .../1.2.1/opensearch-dashboards-1.2.1.yml | 0
 .../1.2.2/opensearch-1.2.2-test.yml | 0
 .../1.2.2/opensearch-1.2.2.yml | 0
 .../1.2.3/opensearch-1.2.3-test.yml | 0
 .../1.2.3/opensearch-1.2.3.yml | 0
 .../1.2.4/opensearch-1.2.4-test.yml | 0
 .../1.2.4/opensearch-1.2.4.yml | 0
 .../1.2.5/opensearch-1.2.5.yml | 0
 manifests/1.3.4/opensearch-1.3.4-test.yml | 71 +++++
 manifests/1.3.4/opensearch-1.3.4.yml | 93 ++++++
 .../opensearch-dashboards-1.3.4-test.yml | 17 +
 .../1.3.4/opensearch-dashboards-1.3.4.yml | 31 ++
 manifests/2.1.0/opensearch-2.1.0.yml | 36 +--
 .../2.1.0/opensearch-dashboards-2.1.0.yml | 25 +-
 manifests/2.1.1/opensearch-2.1.1.yml | 15 +
 manifests/2.2.0/opensearch-2.2.0.yml | 1 +
 .../opensearch-release-notes-2.1.0.md | 245 ++++++++++++++
 scripts/gradle/gradle-check.sh | 69 ++++
 src/run_sign.py | 3 +-
 src/sign_workflow/sign_args.py | 5 +-
 src/sign_workflow/sign_artifacts.py | 11 +-
 src/sign_workflow/signer.py | 40 +--
 src/sign_workflow/signer_pgp.py | 51 +++
 src/sign_workflow/signer_windows.py | 51 +++
 src/sign_workflow/signers.py | 31 ++
 tests/jenkins/TestCCRPerfTest.groovy | 2 +-
 tests/jenkins/TestCopyContainer.groovy | 2 +-
 tests/jenkins/TestOpenSearchBwcTest.groovy | 2 +-
 .../TestOpenSearchDashboardsBwcTest.groovy | 2 +-
 .../TestOpenSearchDashboardsIntegTest.groovy | 2 +-
 tests/jenkins/TestOpenSearchIntegTest.groovy | 2 +-
 tests/jenkins/TestPromoteArtifacts.groovy | 21 +-
 tests/jenkins/TestPromoteContainer.groovy | 116 +++++++
 tests/jenkins/TestPromoteYumRepos.groovy | 17 +-
 tests/jenkins/TestPublishNotification.groovy | 2 +-
 .../TestRunNonSecurityPerfTestScript.groovy | 2 +-
 tests/jenkins/TestRunPerfTestScript.groovy | 2 +-
 .../perf-test.jenkinsfile.txt | 11 +-
 ...data-prepper-all-artifacts.jenkinsfile.txt | 57 ++--
 .../maven-sign-release.jenkinsfile.txt | 33 +-
 .../bwc-test.jenkinsfile.txt | 4 +-
 .../integ-test.jenkinsfile.txt | 4 +-
 .../opensearch/bwc-test.jenkinsfile.txt | 4 +-
 .../opensearch/integ-test.jenkinsfile.txt | 4 +-
 .../perf-test-with-security.jenkinsfile.txt | 18 +-
 .../opensearch/perf-test.jenkinsfile.txt | 11 +-
 ...stPromoteContainerToDocker.jenkinsfile.txt | 32 ++
 ...inerToDockerECRLatestMajor.jenkinsfile.txt | 107 +++++++
 ...oteContainerToDockerLatest.jenkinsfile.txt | 47 +++
 ...ntainerToDockerLatestMajor.jenkinsfile.txt | 62 ++++
 ...moteContainerToDockerMajor.jenkinsfile.txt | 47 +++
 ...eContainerToECRLatestMajor.jenkinsfile.txt | 62 ++++
 .../release-tag-dashboards.jenkinsfile.txt | 48 +--
 .../release-tag/release-tag.jenkinsfile.txt | 64 ++--
 .../sign-standalone-artifacts.jenkinsfile.txt | 27 +-
 .../jobs/AssembleManifest_rpm_Jenkinsfile.txt | 18 +-
 .../BuildUploadManifestSHA_Jenkinsfile.txt | 14 +-
 .../jobs/CreateReleaseTag_Jenkinsfile.txt | 64 ++--
 .../jobs/DownloadFromS3_Jenkinsfile.txt | 6 +-
 ...ManifestSHA_Jenkinsfile_does_not_exist.txt | 7 +-
 .../GetManifestSHA_Jenkinsfile_exists.txt | 7 +-
 .../PromoteArtifactsQualifier_Jenkinsfile.txt | 38 ++-
 ...fier_OpenSearch_Dashboards_Jenkinsfile.txt | 38 ++-
 ...ArtifactsQualifier_actions_Jenkinsfile.txt | 142 +++++----
 ...ions_OpenSearch_Dashboards_Jenkinsfile.txt | 142 +++++----
 .../jobs/PromoteArtifacts_Jenkinsfile.txt | 79 +++--
 ...acts_OpenSearch_Dashboards_Jenkinsfile.txt | 38 ++-
 .../PromoteArtifacts_actions_Jenkinsfile.txt | 241 +++++++-------
 ...ions_OpenSearch_Dashboards_Jenkinsfile.txt | 142 +++++----
 .../jobs/PromoteYumRepos_Jenkinsfile.txt | 141 ++++----
 .../jobs/PublishNotification_Jenkinsfile | 2 +-
 .../jobs/PublishNotification_Jenkinsfile.txt | 4 +-
 .../jobs/RunGradleCheck_Jenkinsfile.txt | 30 +-
 .../jobs/SignArtifacts_Jenkinsfile.txt | 70 ++--
 .../CreateReleaseTagLibTester.groovy | 18 +-
 .../lib-testers/SignArtifactsLibTester.groovy | 44 ++-
 tests/test_run_sign.py | 3 +-
 .../test_sign_artifacts.py | 39 ++-
 tests/tests_sign_workflow/test_signer.py | 112 +------
 tests/tests_sign_workflow/test_signer_pgp.py | 104 ++++++
 .../test_signer_windows.py | 49 +++
 tests/tests_sign_workflow/test_signers.py | 30 ++
 vars/buildUploadManifestSHA.groovy | 13 +-
 vars/createReleaseTag.groovy | 8 +-
 vars/downloadFromS3.groovy | 10 +-
 vars/getManifestSHA.groovy | 9 +-
 vars/promoteArtifacts.groovy | 124 ++++----
 vars/promoteContainer.groovy | 102 ++++++
 vars/promoteYumRepos.groovy | 95 +++---
 vars/runGradleCheck.groovy | 30 +-
 vars/runPerfTestScript.groovy | 9 +-
 vars/signArtifacts.groovy | 77 +++--
 136 files changed, 3189 insertions(+), 1372 deletions(-)
 create mode 100644 jenkins/promotion/promote-docker-ecr.jenkinsfile
 rename {manifests => legacy-manifests}/1.0.0/opensearch-1.0.0-maven.yml (100%)
 rename {manifests => legacy-manifests}/1.0.0/opensearch-1.0.0-test.yml (100%)
 rename {manifests => legacy-manifests}/1.0.0/opensearch-1.0.0.yml (100%)
 rename {manifests => legacy-manifests}/1.0.1/opensearch-1.0.1-test.yml (100%)
 rename {manifests => legacy-manifests}/1.0.1/opensearch-1.0.1.yml (100%)
 rename {manifests => legacy-manifests}/1.0.1/opensearch-dashboards-1.0.1.yml (100%)
 rename {manifests => legacy-manifests}/1.1.0/opensearch-1.1.0-test.yml (100%)
 rename {manifests => legacy-manifests}/1.1.0/opensearch-1.1.0.yml (100%)
 rename {manifests => legacy-manifests}/1.1.0/opensearch-dashboards-1.1.0-test.yml (100%)
 rename {manifests => legacy-manifests}/1.1.0/opensearch-dashboards-1.1.0.yml (100%)
 rename {manifests => legacy-manifests}/1.1.1/opensearch-1.1.1-test.yml (100%)
 rename {manifests => legacy-manifests}/1.1.1/opensearch-1.1.1.yml (100%)
 rename {manifests => legacy-manifests}/1.1.1/opensearch-dashboards-1.1.1-test.yml (100%)
 rename {manifests => legacy-manifests}/1.1.1/opensearch-dashboards-1.1.1.yml (100%)
 rename {manifests => legacy-manifests}/1.2.0/opensearch-1.2.0-test.yml (100%)
 rename {manifests => legacy-manifests}/1.2.0/opensearch-1.2.0.yml (100%)
 rename {manifests => legacy-manifests}/1.2.0/opensearch-dashboards-1.2.0-test.yml (100%)
 rename {manifests => legacy-manifests}/1.2.0/opensearch-dashboards-1.2.0.yml (100%)
 rename {manifests => legacy-manifests}/1.2.1/opensearch-1.2.1-test.yml (100%)
 rename {manifests => legacy-manifests}/1.2.1/opensearch-1.2.1.yml (100%)
 rename {manifests => legacy-manifests}/1.2.1/opensearch-dashboards-1.2.1.yml (100%)
 rename {manifests => legacy-manifests}/1.2.2/opensearch-1.2.2-test.yml (100%)
 rename {manifests => legacy-manifests}/1.2.2/opensearch-1.2.2.yml (100%)
 rename {manifests => legacy-manifests}/1.2.3/opensearch-1.2.3-test.yml (100%)
 rename {manifests => legacy-manifests}/1.2.3/opensearch-1.2.3.yml (100%)
 rename {manifests => legacy-manifests}/1.2.4/opensearch-1.2.4-test.yml (100%)
 rename {manifests => legacy-manifests}/1.2.4/opensearch-1.2.4.yml (100%)
 rename {manifests => legacy-manifests}/1.2.5/opensearch-1.2.5.yml (100%)
 create mode 100644 manifests/1.3.4/opensearch-1.3.4-test.yml
 create mode 100644 manifests/1.3.4/opensearch-dashboards-1.3.4-test.yml
 create mode 100644 manifests/2.1.1/opensearch-2.1.1.yml
 create mode 100644 release-notes/opensearch-release-notes-2.1.0.md
 create mode 100644 scripts/gradle/gradle-check.sh
 create mode 100644 src/sign_workflow/signer_pgp.py
 create mode 100644 src/sign_workflow/signer_windows.py
 create mode 100644 src/sign_workflow/signers.py
 create mode 100644 tests/jenkins/TestPromoteContainer.groovy
 create mode 100644 tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDocker.jenkinsfile.txt
 create mode 100644 tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerECRLatestMajor.jenkinsfile.txt
 create mode 100644 tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatest.jenkinsfile.txt
 create mode 100644 tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatestMajor.jenkinsfile.txt
 create mode 100644 tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerMajor.jenkinsfile.txt
 create mode 100644 tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToECRLatestMajor.jenkinsfile.txt
 create mode 100644 tests/tests_sign_workflow/test_signer_pgp.py
 create mode 100644 tests/tests_sign_workflow/test_signer_windows.py
 create mode 100644 tests/tests_sign_workflow/test_signers.py
 create mode 100644 vars/promoteContainer.groovy

diff --git a/.github/ISSUE_TEMPLATE/release_template.md b/.github/ISSUE_TEMPLATE/release_template.md
index 2cb98550fc..437251c028 100644
--- a/.github/ISSUE_TEMPLATE/release_template.md
+++ b/.github/ISSUE_TEMPLATE/release_template.md
@@ -61,6 +61,7 @@ __REPLACE with OpenSearch wide initiatives to improve quality and consistency.__
 ### Release testing - _Ends __REPLACE_RELEASE-minus-6-days___
 - [ ] Declare a release candidate build, and provide the instructions with the release candidates for teams on testing (__REPLACE_RELEASE-minus-8-days__).
+- [ ] Stop builds for this version of OpenSearch and/or OpenSearch Dashboards in order to avoid accidental commits going in unknowingly. Restart only if necessary else manually run the build workflow and declare new release candidate.
 - [ ] Sanity Testing (__REPLACE_RELEASE-minus-8-days__ - __REPLACE_RELEASE-minus-6-days__): Sanity testing and fixing of critical issues found by teams. Teams test their components within the distribution, ensuring integration, backwards compatibility, and perf tests pass.
 - [ ] Publish all test results in the comments of this issue.
@@ -81,11 +82,11 @@ __REPLACE with OpenSearch wide initiatives to improve quality and consistency.__
 - [ ] Create [release tags](https://github.com/opensearch-project/opensearch-build/blob/main/jenkins/release-tag/release-tag.jenkinsfile) for each component (Jenkins job name: release-tag-creation).
 - [ ] Replace refs in [manifests/{{ env.VERSION }}](/opensearch-project/opensearch-build/tree/main/manifests/{{ env.VERSION }}) with tags and remove checks.
+- [ ] If this is a major or minor version release, stop building previous patch version.
 - [ ] Generate distribution release notes reviewed by PM team for opensearch-build repository.
 - [ ] Increment version for Helm Charts [(sample PR)](https://github.com/opensearch-project/helm-charts/pull/246) for the `{{ env.VERSION }}` release.
 - [ ] Increment version for Ansible Charts [(sample PR)](https://github.com/opensearch-project/ansible-playbook/pull/50) for the `{{ env.VERSION }}` release.
 - [ ] Prepare [for next patch release](https://github.com/opensearch-project/opensearch-plugins/blob/main/META.md#increment-a-version-in-every-plugin) by incrementing patch versions for each component.
-- [ ] Lower the [frequency of builds](https://github.com/opensearch-project/opensearch-build/pull/1475) for this version of OpenSearch and/or OpenSearch Dashboards.
 - [ ] Update [this template](https://github.com/opensearch-project/opensearch-build/blob/main/.github/ISSUE_TEMPLATE/release_template.md) with any new or missed steps.
 - [ ] Create an issue for a retrospective, solicit feedback, and publish a summary.
@@ -93,7 +94,7 @@ __REPLACE with OpenSearch wide initiatives to improve quality and consistency.__ __Replace with links to all component tracking issues.__ -| Component | On track | Notes | +| Component | On track | Release Notes | | --------- | -------- | ----- | | {COMPONENT_ISSUE_LINK} | {INDICATOR}} | {STATUS} | diff --git a/.github/workflows/manifests.yml b/.github/workflows/manifests.yml index e01a5282cb..2166c2fca3 100644 --- a/.github/workflows/manifests.yml +++ b/.github/workflows/manifests.yml @@ -10,25 +10,59 @@ on: - cron: 0 0 * * * jobs: + list-manifests11: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v2 + - id: set-matrix + run: echo "::set-output name=matrix::$(ls manifests/**/opensearch*.yml | awk -F/ '{if($2<2)print$0}' | jq -R -s -c 'split("\n")[:-1]')" - list-manifests: + list-manifests17: runs-on: ubuntu-latest outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: - uses: actions/checkout@v2 - id: set-matrix - run: echo "::set-output name=matrix::$(ls manifests/**/opensearch*.yml | jq -R -s -c 'split("\n")[:-1]')" + run: echo "::set-output name=matrix::$(ls manifests/**/opensearch*.yml | awk -F/ '{if($2>2)print$0}' | jq -R -s -c 'split("\n")[:-1]')" + + manifest-checks-jdk11: + needs: list-manifests11 + runs-on: ubuntu-latest + env: + PYTHON_VERSION: 3.7 + JDK_VERSION: 11 + strategy: + matrix: + manifest: ${{ fromJson(needs.list-manifests11.outputs.matrix) }} + steps: + - uses: actions/checkout@v2 + - name: Set Up JDK ${{ env.JDK_VERSION }} + uses: actions/setup-java@v1 + with: + java-version: ${{ env.JDK_VERSION }} + - name: Set up Python ${{ env.PYTHON_VERSION }} + uses: actions/setup-python@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + - name: Install Pipenv and Dependencies + run: | + python -m pip install --upgrade pipenv wheel + - name: OpenSearch Manifests + run: |- + ./ci.sh ${{ matrix.manifest }} --snapshot - check: - needs: list-manifests + manifest-checks-jdk17: + needs: list-manifests17 runs-on: ubuntu-latest env: PYTHON_VERSION: 3.7 - JDK_VERSION: 14 + JDK_VERSION: 17 strategy: matrix: - manifest: ${{ fromJson(needs.list-manifests.outputs.matrix) }} + manifest: ${{ fromJson(needs.list-manifests17.outputs.matrix) }} steps: - uses: actions/checkout@v2 - name: Set Up JDK ${{ env.JDK_VERSION }} diff --git a/Pipfile.lock b/Pipfile.lock index f5e633f5ee..98bd58b99f 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -18,11 +18,10 @@ "default": { "atomicwrites": { "hashes": [ - "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197", - "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a" + "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11" ], "index": "pypi", - "version": "==1.4.0" + "version": "==1.4.1" }, "attrs": { "hashes": [ @@ -41,10 +40,11 @@ }, "certifi": { "hashes": [ - "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", - "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" + "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d", + "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412" ], - "version": "==2021.10.8" + "markers": "python_version >= '3.6'", + "version": "==2022.6.15" }, "cfgv": { "hashes": [ @@ -56,19 +56,19 @@ }, "charset-normalizer": { "hashes": [ - "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", - "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" + 
"sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5", + "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413" ], - "markers": "python_version >= '3'", - "version": "==2.0.12" + "markers": "python_version >= '3.6'", + "version": "==2.1.0" }, "click": { "hashes": [ - "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1", - "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb" + "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", + "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" ], - "markers": "python_version >= '3.6'", - "version": "==8.0.4" + "markers": "python_version >= '3.7'", + "version": "==8.1.3" }, "coverage": { "hashes": [ @@ -133,11 +133,11 @@ }, "filelock": { "hashes": [ - "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85", - "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0" + "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404", + "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04" ], "markers": "python_version >= '3.7'", - "version": "==3.6.0" + "version": "==3.7.1" }, "flake8": { "hashes": [ @@ -149,27 +149,27 @@ }, "identify": { "hashes": [ - "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17", - "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323" + "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa", + "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82" ], "markers": "python_version >= '3.7'", - "version": "==2.4.12" + "version": "==2.5.1" }, "idna": { "hashes": [ "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_version >= '3'", + "markers": "python_version >= '3.5'", "version": "==3.3" }, "importlib-metadata": { "hashes": [ - "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6", - "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539" + "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670", + "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23" ], "markers": "python_version < '3.8'", - "version": "==4.11.3" + "version": "==4.12.0" }, "iniconfig": { "hashes": [ @@ -203,32 +203,32 @@ }, "mypy": { "hashes": [ - "sha256:080097eee5393fd740f32c63f9343580aaa0fb1cda0128fd859dfcf081321c3d", - "sha256:0d3bcbe146247997e03bf030122000998b076b3ac6925b0b6563f46d1ce39b50", - "sha256:0dd441fbacf48e19dc0c5c42fafa72b8e1a0ba0a39309c1af9c84b9397d9b15a", - "sha256:108f3c7e14a038cf097d2444fa0155462362c6316e3ecb2d70f6dd99cd36084d", - "sha256:3bada0cf7b6965627954b3a128903a87cac79a79ccd83b6104912e723ef16c7b", - "sha256:3cf77f138efb31727ee7197bc824c9d6d7039204ed96756cc0f9ca7d8e8fc2a4", - "sha256:42c216a33d2bdba08098acaf5bae65b0c8196afeb535ef4b870919a788a27259", - "sha256:465a6ce9ca6268cadfbc27a2a94ddf0412568a6b27640ced229270be4f5d394d", - "sha256:6a8e1f63357851444940351e98fb3252956a15f2cabe3d698316d7a2d1f1f896", - "sha256:745071762f32f65e77de6df699366d707fad6c132a660d1342077cbf671ef589", - "sha256:818cfc51c25a5dbfd0705f3ac1919fff6971eb0c02e6f1a1f6a017a42405a7c0", - "sha256:8e5974583a77d630a5868eee18f85ac3093caf76e018c510aeb802b9973304ce", - "sha256:8eaf55fdf99242a1c8c792247c455565447353914023878beadb79600aac4a2a", - "sha256:98f61aad0bb54f797b17da5b82f419e6ce214de0aa7e92211ebee9e40eb04276", - 
"sha256:b2ce2788df0c066c2ff4ba7190fa84f18937527c477247e926abeb9b1168b8cc", - "sha256:b30d29251dff4c59b2e5a1fa1bab91ff3e117b4658cb90f76d97702b7a2ae699", - "sha256:bf446223b2e0e4f0a4792938e8d885e8a896834aded5f51be5c3c69566495540", - "sha256:cbcc691d8b507d54cb2b8521f0a2a3d4daa477f62fe77f0abba41e5febb377b7", - "sha256:d051ce0946521eba48e19b25f27f98e5ce4dbc91fff296de76240c46b4464df0", - "sha256:d61b73c01fc1de799226963f2639af831307fe1556b04b7c25e2b6c267a3bc76", - "sha256:eea10982b798ff0ccc3b9e7e42628f932f552c5845066970e67cd6858655d52c", - "sha256:f79137d012ff3227866222049af534f25354c07a0d6b9a171dba9f1d6a1fdef4", - "sha256:fc5ecff5a3bbfbe20091b1cad82815507f5ae9c380a3a9bf40f740c70ce30a9b" + "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5", + "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66", + "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e", + "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56", + "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e", + "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d", + "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813", + "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932", + "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569", + "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b", + "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0", + "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648", + "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6", + "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950", + "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15", + "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723", + "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a", + "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3", + "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6", + "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24", + "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b", + "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d", + "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492" ], "index": "pypi", - "version": "==0.941" + "version": "==0.961" }, "mypy-extensions": { "hashes": [ @@ -239,10 +239,11 @@ }, "nodeenv": { "hashes": [ - "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b", - "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7" + "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e", + "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b" ], - "version": "==1.6.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", + "version": "==1.7.0" }, "packaging": { "hashes": [ @@ -261,11 +262,11 @@ }, "platformdirs": { "hashes": [ - "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d", - "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227" + "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788", + "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19" ], "markers": "python_version >= '3.7'", - "version": "==2.5.1" + "version": "==2.5.2" }, 
"pluggy": { "hashes": [ @@ -285,41 +286,41 @@ }, "psutil": { "hashes": [ - "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5", - "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a", - "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4", - "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841", - "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d", - "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d", - "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0", - "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845", - "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf", - "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b", - "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07", - "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618", - "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2", - "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd", - "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666", - "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce", - "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3", - "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d", - "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25", - "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492", - "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b", - "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d", - "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2", - "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203", - "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2", - "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94", - "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9", - "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64", - "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56", - "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3", - "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c", - "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3" + "sha256:068935df39055bf27a29824b95c801c7a5130f118b806eee663cad28dca97685", + "sha256:0904727e0b0a038830b019551cf3204dd48ef5c6868adc776e06e93d615fc5fc", + "sha256:0f15a19a05f39a09327345bc279c1ba4a8cfb0172cc0d3c7f7d16c813b2e7d36", + "sha256:19f36c16012ba9cfc742604df189f2f28d2720e23ff7d1e81602dbe066be9fd1", + "sha256:20b27771b077dcaa0de1de3ad52d22538fe101f9946d6dc7869e6f694f079329", + "sha256:28976df6c64ddd6320d281128817f32c29b539a52bdae5e192537bc338a9ec81", + "sha256:29a442e25fab1f4d05e2655bb1b8ab6887981838d22effa2396d584b740194de", + "sha256:3054e923204b8e9c23a55b23b6df73a8089ae1d075cb0bf711d3e9da1724ded4", + "sha256:32c52611756096ae91f5d1499fe6c53b86f4a9ada147ee42db4991ba1520e574", + "sha256:3a76ad658641172d9c6e593de6fe248ddde825b5866464c3b2ee26c35da9d237", + "sha256:44d1826150d49ffd62035785a9e2c56afcea66e55b43b8b630d7706276e87f22", + "sha256:4b6750a73a9c4a4e689490ccb862d53c7b976a2a35c4e1846d049dcc3f17d83b", + "sha256:56960b9e8edcca1456f8c86a196f0c3d8e3e361320071c93378d41445ffd28b0", + 
"sha256:57f1819b5d9e95cdfb0c881a8a5b7d542ed0b7c522d575706a80bedc848c8954", + "sha256:58678bbadae12e0db55186dc58f2888839228ac9f41cc7848853539b70490021", + "sha256:645bd4f7bb5b8633803e0b6746ff1628724668681a434482546887d22c7a9537", + "sha256:799759d809c31aab5fe4579e50addf84565e71c1dc9f1c31258f159ff70d3f87", + "sha256:79c9108d9aa7fa6fba6e668b61b82facc067a6b81517cab34d07a84aa89f3df0", + "sha256:91c7ff2a40c373d0cc9121d54bc5f31c4fa09c346528e6a08d1845bce5771ffc", + "sha256:9272167b5f5fbfe16945be3db475b3ce8d792386907e673a209da686176552af", + "sha256:944c4b4b82dc4a1b805329c980f270f170fdc9945464223f2ec8e57563139cf4", + "sha256:a6a11e48cb93a5fa606306493f439b4aa7c56cb03fc9ace7f6bfa21aaf07c453", + "sha256:a8746bfe4e8f659528c5c7e9af5090c5a7d252f32b2e859c584ef7d8efb1e689", + "sha256:abd9246e4cdd5b554a2ddd97c157e292ac11ef3e7af25ac56b08b455c829dca8", + "sha256:b14ee12da9338f5e5b3a3ef7ca58b3cba30f5b66f7662159762932e6d0b8f680", + "sha256:b88f75005586131276634027f4219d06e0561292be8bd6bc7f2f00bdabd63c4e", + "sha256:c7be9d7f5b0d206f0bbc3794b8e16fb7dbc53ec9e40bbe8787c6f2d38efcf6c9", + "sha256:d2d006286fbcb60f0b391741f520862e9b69f4019b4d738a2a45728c7e952f1b", + "sha256:db417f0865f90bdc07fa30e1aadc69b6f4cad7f86324b02aa842034efe8d8c4d", + "sha256:e7e10454cb1ab62cc6ce776e1c135a64045a11ec4c6d254d3f7689c16eb3efd2", + "sha256:f65f9a46d984b8cd9b3750c2bdb419b2996895b005aefa6cbaba9a143b1ce2c5", + "sha256:fea896b54f3a4ae6f790ac1d017101252c93f6fe075d0e7571543510f11d2676" ], "index": "pypi", - "version": "==5.9.0" + "version": "==5.9.1" }, "py": { "hashes": [ @@ -347,19 +348,19 @@ }, "pyparsing": { "hashes": [ - "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea", - "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484" + "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", + "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" ], - "markers": "python_version >= '3.6'", - "version": "==3.0.7" + "markers": "python_full_version >= '3.6.8'", + "version": "==3.0.9" }, "pytest": { "hashes": [ - "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63", - "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea" + "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c", + "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45" ], "index": "pypi", - "version": "==7.1.1" + "version": "==7.1.2" }, "pytest-cov": { "hashes": [ @@ -406,11 +407,11 @@ }, "requests": { "hashes": [ - "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", - "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" ], "index": "pypi", - "version": "==2.27.1" + "version": "==2.28.1" }, "retry": { "hashes": [ @@ -430,11 +431,11 @@ }, "setuptools": { "hashes": [ - "sha256:6599055eeb23bfef457d5605d33a4d68804266e6cb430b0fb12417c5efeae36c", - "sha256:782ef48d58982ddb49920c11a0c5c9c0b02e7d7d1c2ad0aa44e1a1e133051c96" + "sha256:16923d366ced322712c71ccb97164d07472abeecd13f3a6c283f6d5d26722793", + "sha256:db3b8e2f922b2a910a29804776c643ea609badb6a32c4bcc226fd4fd902cce65" ], "markers": "python_version >= '3.7'", - "version": "==60.10.0" + "version": "==63.1.0" }, "six": { "hashes": [ @@ -470,33 +471,33 @@ }, "typed-ast": { "hashes": [ - "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e", - 
"sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344", - "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266", - "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a", - "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd", - "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d", - "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837", - "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098", - "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e", - "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27", - "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b", - "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596", - "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76", - "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30", - "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4", - "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78", - "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca", - "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985", - "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb", - "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88", - "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7", - "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5", - "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e", - "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7" + "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2", + "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1", + "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6", + "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62", + "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac", + "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d", + "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc", + "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2", + "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97", + "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35", + "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6", + "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1", + "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4", + "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c", + "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e", + "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec", + "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f", + "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72", + "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47", + "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72", + "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe", + "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6", + "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3", + 
"sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66" ], "markers": "python_version < '3.8'", - "version": "==1.5.2" + "version": "==1.5.4" }, "types-pyyaml": { "hashes": [ @@ -508,50 +509,49 @@ }, "types-requests": { "hashes": [ - "sha256:5d6f77f3c7565659bdb7b7bce1d33d1abb7d0b056138cac714860e13da2f19df", - "sha256:cf0646031dd6307113b37814f743c04f0707a3357378c2bb1326f848412f5ba9" + "sha256:85383b4ef0535f639c3f06c5bbb6494bbf59570c4cd88bbcf540f0b2ac1b49ab", + "sha256:9863d16dfbb3fa55dcda64fa3b989e76e8859033b26c1e1623e30465cfe294d3" ], "index": "pypi", - "version": "==2.27.13" + "version": "==2.28.0" }, "types-urllib3": { "hashes": [ - "sha256:24d64e441168851eb05f1d022de18ae31558f5649c8f1117e384c2e85e31315b", - "sha256:bd0abc01e9fb963e4fddd561a56d21cc371b988d1245662195c90379077139cd" + "sha256:20588c285e5ca336d908d2705994830a83cfb6bda40fc356bbafaf430a262013", + "sha256:8bb3832c684c30cbed40b96e28bc04703becb2b97d82ac65ba4b968783453b0e" ], - "version": "==1.26.11" + "version": "==1.26.16" }, "typing-extensions": { "hashes": [ - "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42", - "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2" + "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02", + "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6" ], - "markers": "python_version >= '3.6'", - "version": "==4.1.1" + "markers": "python_version >= '3.7'", + "version": "==4.3.0" }, "urllib3": { "hashes": [ - "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14", - "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e" + "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec", + "sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", - "version": "==1.26.9" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'", + "version": "==1.26.10" }, "validators": { "hashes": [ - "sha256:0143dcca8a386498edaf5780cbd5960da1a4c85e0719f3ee5c9b41249c4fefbd", - "sha256:37cd9a9213278538ad09b5b9f9134266e7c226ab1fede1d500e29e0a8fbb9ea6" + "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a" ], "index": "pypi", - "version": "==0.18.2" + "version": "==0.20.0" }, "virtualenv": { "hashes": [ - "sha256:dd448d1ded9f14d1a4bfa6bfc0c5b96ae3be3f2d6c6c159b23ddcfd701baa021", - "sha256:e9dd1a1359d70137559034c0f5433b34caf504af2dc756367be86a5a32967134" + "sha256:288171134a2ff3bfb1a2f54f119e77cd1b81c29fc1265a2356f3e8d14c7d58c4", + "sha256:b30aefac647e86af6d82bfc944c556f8f1a9c90427b2fb4e3bfbf338cb82becf" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==20.13.3" + "version": "==20.15.1" }, "yamlfix": { "hashes": [ @@ -563,18 +563,18 @@ }, "yamllint": { "hashes": [ - "sha256:3934dcde484374596d6b52d8db412929a169f6d9e52e20f9ade5bf3523d9b96e" + "sha256:e688324b58560ab68a1a3cff2c0a474e3fed371dfe8da5d1b9817b7df55039ce" ], "index": "pypi", - "version": "==1.26.3" + "version": "==1.27.1" }, "zipp": { "hashes": [ - "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d", - "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375" + "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad", + "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099" 
], "markers": "python_version >= '3.7'", - "version": "==3.7.0" + "version": "==3.8.0" } }, "develop": {} diff --git a/build.gradle b/build.gradle index 866c181b04..992f59583c 100644 --- a/build.gradle +++ b/build.gradle @@ -28,7 +28,7 @@ repositories { } dependencies { - compile group: 'junit', name: 'junit', version: '4.13.2' + testImplementation group: 'junit', name: 'junit', version: '4.13.2' implementation group: 'org.codehaus.groovy', name: 'groovy-all', version: '3.0.9', ext: 'pom' implementation group: 'com.cloudbees', name: 'groovy-cps', version: '1.31' testImplementation group: 'org.yaml', name: 'snakeyaml', version: '1.29' @@ -69,19 +69,19 @@ sourceSets { } sharedLibrary { - coreVersion = '2.355' // https://mvnrepository.com/artifact/org.jenkins-ci.main/jenkins-core?repo=jenkins-releases + coreVersion = '2.358' // https://repo.jenkins-ci.org/public/org/jenkins-ci/main/jenkins-core/ testHarnessVersion = '1736.vc72c458c5103' // https://mvnrepository.com/artifact/org.jenkins-ci.main/jenkins-test-harness?repo=jenkins-releases pluginDependencies { - workflowCpsGlobalLibraryPluginVersion = '2.21.3' // https://mvnrepository.com/artifact/org.jenkins-ci.plugins.workflow/workflow-cps-global-lib?repo=jenkins-releases + workflowCpsGlobalLibraryPluginVersion = '570.v21311f4951f8' // https://repo.jenkins-ci.org/public/org/jenkins-ci/plugins/workflow/workflow-cps-global-lib/ // see https://mvnrepository.com/artifact/org.jenkins-ci.plugins/?repo=jenkins-releases for latest dependency('org.jenkins-ci.plugins.workflow', 'workflow-cps', '2.94.1') dependency('org.jenkins-ci.plugins.workflow', 'workflow-multibranch', '2.26.1') - dependency('org.jenkins-ci.plugins', 'pipeline-input-step', '2.12') + dependency('org.jenkins-ci.plugins', 'pipeline-input-step', '449.v77f0e8b_845c4') // https://repo.jenkins-ci.org/public/org/jenkins-ci/plugins/pipeline-input-step/ dependency('org.jenkins-ci.plugins', 'script-security', '1172.v35f6a_0b_8207e') dependency('org.jenkins-ci.plugins', 'credentials', '1112.vc87b_7a_3597f6') dependency('org.jenkins-ci.plugins', 'git-client', '3.10.1') dependency('org.jenkins-ci.plugins', 'junit', '1.55') - dependency('org.jenkins-ci.plugins', 'mailer', '1.34.2') + dependency('org.jenkins-ci.plugins', 'mailer', '408.vd726a_1130320') // https://repo.jenkins-ci.org/public/org/jenkins-ci/plugins/mailer/ } } diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index e708b1c023ec8b20f512888fe07c5bd3ff77bb8f..41d9927a4d4fb3f96a785543079b8df6723c946b 100644 GIT binary patch delta 20926 zcmY(p19zBh*tDC*wr$(CZQHhW$3|n@W@Fn%V>fmhHTa(WuDw3|hwGe~>zEmy1FKsG zYYc=z@M+Z>Uk4n- zf>LPE!P?mA5#!>@QlN|1%u#eAY%z9sYzTix2)?dl^qr+FV;S+1iF%X=EN6X@efcip zx4L{6MHen@KT&~3ddxw!vGK3 zDR6IzmfS(C#hBd@wn!OgvMoF}phsEk&F5-Dcwt7G2xG&Dm&xutI)E-Va!-qKz~+w0 z-=AFd+H(~(Q$3%N5nez;ZIxbBM31j>5Nyo-YkiExY1M<@u<0e*nz!!R z;{N$-qP&QO{9nWv^INxb>J`g-yYMA$eDo8qb{Bw9^fZ9m+S(Rz2Zph#(1yUfaZB?I z#eOI?a)(CpDeqla5F^C|B-C7T7CC2S%N!%mR&iZ=7m$e>8JAYv-&Am?exYu9F)s@^ z9C)0W-|mW~Vu~>&H5kvxytGG67Zv0pEg}b-m(ggB8~^+aXZ&XbbIGOp!bkEM{Np3q z@-SX2K#W$Hez?IRlyxVVm5t}P- zltiFvZ&=0@Q}LqUpz=6(h07TA`ZYSz8rFm{Z{-~Qw!}yL8*=dtF@T_H90~mu8Kw1t z)le9013)H|!YcV=K?2_d9ifA*Q*M@vBRhpdibeK-gIY}{cl&GETL*)(oq?%BoP{H$ zn4O~f$L0bBm?qk}Rxw_2yYt*IM#^$v;IJSd(9j_NsR~GbNZnQu7zjwxm0I8$)sVjq#M(yl^fk=Y`b_$ZVpEG;yCH|Z~I1>MTYdpi8P>+NQC zE_BSsn_WD^EqD%(G{YUlEBLDQx{o%zvDKPVnupGJe#6t<@AjO#$J70?_*f7K>5NMO zCdGnVcF-Cu*i*B@rqUDnlJ*oFjO4O5fDMd!aWYNYr?1Q%bXxmhTs+GlOuiIos<7s9?Rq}Re!?8dR-lV6wuAMP@lIdDi#5Rjy`J^G=>=w^ 
[base85-encoded binary delta payload for gradle/wrapper/gradle-wrapper.jar omitted; not human-readable]
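The gradle-wrapper.properties, gradlew, and gradlew.bat hunks that follow are the files Gradle rewrites during a wrapper upgrade. A minimal sketch of the command that would regenerate them for this 6.6.1 -> 7.4.2 bump, assuming the standard wrapper task was used (the exact invocation is not recorded in this patch):

    # Assumption: standard Gradle wrapper task; version and distribution type taken from the gradle-wrapper.properties hunk below.
    ./gradlew wrapper --gradle-version 7.4.2 --distribution-type bin

The --distribution-type bin flag matches the switch from gradle-6.6.1-all.zip to gradle-7.4.2-bin.zip in distributionUrl.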
zv7G^D$KZ_B`jPGl@+B{4?W!_wN}a3Rb)fk$acEKyHIUIF-ER0(*h1x_bkPV*)|teIdxCk3OTRWw?p;qE8j7z^w3cf0D)ghm{A)QdJrY30o zajOp7bxUaVPOIyKZB#sn=dHajw7~P^tGz?ccX>tb^Mik$7MgJV$YCnDDKa==&nsr% z@y)5R4+BqZ*icaOIj^k4E9ZVpzGG@#3|fT#7IXei!$E%j@AO&*44W#3)5hN0RKBrw zx$=e#vvR4Seglugurm_{K7C!+zgAhc*4W`IEwO54A`U?RgL^+npZCRKhsH zTe3Xs+vb2WRfkgKmLo=AW1>;y!EC$=j)XO4V;r3ik9nj&d8A1j&VeTyBj_Q~?bnp9 z+0au=+KQ#8Pqvrc8{b`RR27HU`5_o85Z+V^hwJyscoFJ>BR#b|k^$_CQbovY`R11> z1m{y9AJ_FSebqAlB{7GL4twf|U8Z6envXF?iI{2AI(it$7#b01X&}tS5MA`rM zowK)qw0lJHWL6bOcKu7F0Ila_fDJz|V@?;)@(0)E41rXCP-$KcX!i%hgRg)C3v}Rz zc^IG@L{Qnl{dpY#@*6mq3I`{`SbyaL#w@^qGz`(?89_^MKmz+%qS_xxO+>86&{6-L zWKlROiqOde`hJ!G1RfE^?$4?~Pb^U^OJMjl8lb@<40s<;H036FODHZ~?mK1@#e1dqL7-fvm zbFLWt@LU!YT}A>VB}7ofvNUk=f+#L7D*_uYiP3sr(-VTzfzB&1K(T^o;;P~xnuKcp zolHn2p%Vcz;l+XBb}+e15cI$!frVazhyYs#{yQM!co;x70Pf+PfQdoSVpSd#nScuZ w|E&DqkiEp6nWHb}B;da$<=?F+{O4J~(cC2_GD0yC1R_ni)(HQ!%J47kKb!F!p8x;= diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index d89a20d212..aa991fceae 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,18 +1,5 @@ - -# -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# - distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=11657af6356b7587bfb37287b5992e94a9686d5c8a0a1b60b87b9928a2decde5 \ No newline at end of file diff --git a/gradlew b/gradlew index 3a163de711..1b6c787337 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ -#!/usr/bin/env sh +#!/bin/sh # -# Copyright 2015 the original author or authors. +# Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,67 +17,101 @@ # ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. 
+# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar @@ -87,9 +121,9 @@ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -98,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -106,80 +140,95 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. 
-if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. # For Cygwin or MSYS, switch paths to Windows format before running java -if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=`expr $i + 1` + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - 0) set -- ;; - 1) set -- "$args0" ;; - 2) set -- "$args0" "$args1" ;; - 3) set -- "$args0" "$args1" "$args2" ;; - 4) set -- "$args0" "$args1" "$args2" "$args3" ;; - 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=`save "$@"` +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. 
+# -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' -exec "$JAVACMD" "$@" \ No newline at end of file +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index 28690fe08a..ac1b06f938 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -86,4 +86,4 @@ exit /b 1 :mainEnd if "%OS%"=="Windows_NT" endlocal -:omega \ No newline at end of file +:omega diff --git a/jenkins/check-for-build.jenkinsfile b/jenkins/check-for-build.jenkinsfile index 386e3d1ba1..c4aedb04e2 100644 --- a/jenkins/check-for-build.jenkinsfile +++ b/jenkins/check-for-build.jenkinsfile @@ -11,13 +11,10 @@ pipeline { } triggers { parameterizedCron ''' + H 1 * * * %INPUT_MANIFEST=2.1.1/opensearch-2.1.1.yml;TARGET_JOB_NAME=distribution-build-opensearch H 1 * * * %INPUT_MANIFEST=2.2.0/opensearch-2.2.0.yml;TARGET_JOB_NAME=distribution-build-opensearch - H/10 * * * * %INPUT_MANIFEST=2.1.0/opensearch-2.1.0.yml;TARGET_JOB_NAME=distribution-build-opensearch - H/10 * * * * %INPUT_MANIFEST=2.1.0/opensearch-dashboards-2.1.0.yml;TARGET_JOB_NAME=distribution-build-opensearch-dashboards H 1 * * * %INPUT_MANIFEST=3.0.0/opensearch-3.0.0.yml;TARGET_JOB_NAME=distribution-build-opensearch H 1 * * * %INPUT_MANIFEST=3.0.0/opensearch-dashboards-3.0.0.yml;TARGET_JOB_NAME=distribution-build-opensearch-dashboards - H 1 * * * %INPUT_MANIFEST=2.0.2/opensearch-dashboards-2.0.2.yml;TARGET_JOB_NAME=distribution-build-opensearch-dashboards - H 1 * * * %INPUT_MANIFEST=2.0.2/opensearch-2.0.2.yml;TARGET_JOB_NAME=distribution-build-opensearch H 1 * * * %INPUT_MANIFEST=1.3.4/opensearch-dashboards-1.3.4.yml;TARGET_JOB_NAME=distribution-build-opensearch-dashboards H 1 * * * %INPUT_MANIFEST=1.3.4/opensearch-1.3.4.yml;TARGET_JOB_NAME=distribution-build-opensearch ''' @@ -105,7 +102,7 @@ pipeline { publishNotification( icon: ':warning:', message: 'Failed checking for build to trigger', - credentialsId: 'BUILD_NOTICE_WEBHOOK', + credentialsId: 'jenkins-build-notice-webhook', manifest: "${INPUT_MANIFEST}", target_job_name: "${TARGET_JOB_NAME}" ) diff --git a/jenkins/cross-cluster-replication/perf-test.jenkinsfile b/jenkins/cross-cluster-replication/perf-test.jenkinsfile index 7a7b1d5ebd..752500444d 100644 --- a/jenkins/cross-cluster-replication/perf-test.jenkinsfile +++ b/jenkins/cross-cluster-replication/perf-test.jenkinsfile @@ -116,7 +116,7 @@ pipeline { icon: ':white_check_mark:', message: 'CCR Performance Tests Successful', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', ) postCleanup() } @@ -130,7 +130,7 @@ pipeline { icon: ':warning:', message: 'Failed CCR Performance Tests', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', ) postCleanup() } diff --git a/jenkins/data-prepper/release-data-prepper-all-artifacts.jenkinsfile b/jenkins/data-prepper/release-data-prepper-all-artifacts.jenkinsfile index 5db941a297..4e4b599eb5 100644 --- a/jenkins/data-prepper/release-data-prepper-all-artifacts.jenkinsfile +++ b/jenkins/data-prepper/release-data-prepper-all-artifacts.jenkinsfile @@ -9,6 +9,13 @@ pipeline { string(name: 'VERSION', description: 'The version of Data Prepper', trim: true) 
string(name: 'DATA_PREPPER_BUILD_NUMBER', description: 'The build number of the Data Prepper build from GitHub to release.', trim: true) } + environment { + DATA_PREPPER_ARTIFACT_STAGING_SITE = credentials('jenkins-data-prepper-artifact-staging-site') + DATA_PREPPER_STAGING_CONTAINER_REPOSITORY = credentials('jenkins-data-prepper-staging-container-repository') + ARTIFACT_PROMOTION_ROLE_NAME = credentials('jenkins-artifact-promotion-role') + AWS_ACCOUNT_ARTIFACT = credentials('jenkins-aws-production-account') + ARTIFACT_PRODUCTION_BUCKET_NAME = credentials('jenkins-artifact-production-bucket-name') + } stages { stage('Promote Archives') { agent { diff --git a/jenkins/gradle/gradle-check.jenkinsfile b/jenkins/gradle/gradle-check.jenkinsfile index e33a84fc0c..2b779510f8 100644 --- a/jenkins/gradle/gradle-check.jenkinsfile +++ b/jenkins/gradle/gradle-check.jenkinsfile @@ -2,14 +2,25 @@ lib = library(identifier: 'jenkins@20211123', retriever: legacySCM(scm)) pipeline { options { - timeout(time: 2, unit: 'HOURS') + timeout(time: 2, unit: 'HOURS') + throttleJobProperty( + categories: [], + limitOneJobWithMatchingParams: false, + maxConcurrentPerNode: 0, + maxConcurrentTotal: 50, + paramsToUseForLimit: '', + throttleEnabled: true, + throttleOption: 'project', + ) } // gradle check have a lot of issues running on containers // Therefore, we directly run it on the agent node agent { node { // Must use Ubuntu agent with 1 executor or gradle check will show a lot of java-related errors - label 'Jenkins-Agent-Ubuntu2004-X64-c518xlarge-Single-Host' + // The c524xlarge is the instance type that has the least amount of errors during gradle check + // https://github.com/opensearch-project/OpenSearch/issues/1975 + label 'Jenkins-Agent-Ubuntu2004-X64-c524xlarge-Single-Host' } } parameters { @@ -42,10 +53,10 @@ pipeline { ) } environment { + JAVA8_HOME="/var/jenkins/tools/hudson.model.JDK/jdk-8" JAVA11_HOME="/var/jenkins/tools/hudson.model.JDK/jdk-11" + JAVA14_HOME="/var/jenkins/tools/hudson.model.JDK/jdk-14" JAVA17_HOME="/var/jenkins/tools/hudson.model.JDK/jdk-17" - JAVA8_HOME="/var/jenkins/tools/hudson.model.JDK/jdk-8" - JAVA_HOME="/var/jenkins/tools/hudson.model.JDK/jdk-17" USER_BUILD_CAUSE = currentBuild.getBuildCauses('hudson.model.Cause$UserIdCause') TIMER_BUILD_CAUSE = currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause') } @@ -68,6 +79,15 @@ pipeline { sh("ls ${JAVA11_HOME}/.. | grep jdk-11") } } + stage('Install jdk-14') { + tools { + jdk 'jdk-14' + } + steps { + echo "Install jdk-14" + sh("ls ${JAVA14_HOME}/.. 
| grep jdk-14") + } + } stage('Install jdk-17') { tools { jdk 'jdk-17' @@ -80,11 +100,16 @@ pipeline { stage('Run Gradle Check') { steps { script { - sh("ls /var/jenkins/tools/hudson.model.JDK/ && env | grep JAVA") + sh("ls /var/jenkins/tools/hudson.model.JDK/") if (env.USER_BUILD_CAUSE.equals('[]') && env.TIMER_BUILD_CAUSE.equals('[]')) { def pr_url = "${pr_to_clone_url}".replace(".git", "/pull/${pr_number}") - println("Triggered by PR: ${pr_url}") - currentBuild.description = """PR #${pr_number}: ${pr_title}""" + println("Triggered by GitHub: ${pr_to_clone_url}") + if ("$pr_number" == "Null") { + currentBuild.description = """Others: ${pr_title}""" + } + else { + currentBuild.description = """PR #${pr_number}: ${pr_title}""" + } runGradleCheck( gitRepoUrl: "${pr_from_clone_url}", @@ -105,6 +130,9 @@ pipeline { } post() { always { + sh ("cp -v `find search/build/reports/jacoco/ -name '*.xml' | head -n 1` codeCoverage.xml || echo") + junit allowEmptyResults: true, testResults: '**/build/test-results/**/*.xml' + archiveArtifacts artifacts: 'codeCoverage.xml', onlyIfSuccessful: true script { sh("rm -rf *") postCleanup() diff --git a/jenkins/opensearch-dashboards/bwc-test.jenkinsfile b/jenkins/opensearch-dashboards/bwc-test.jenkinsfile index dc407267c2..bfd1815f3e 100644 --- a/jenkins/opensearch-dashboards/bwc-test.jenkinsfile +++ b/jenkins/opensearch-dashboards/bwc-test.jenkinsfile @@ -40,9 +40,9 @@ pipeline { currentBuild.result = 'ABORTED' error("BWC Tests failed to start. Missing parameter: AGENT_LABEL.") } - if (!fileExists("manifests/${TEST_MANIFEST}")) { + if (TEST_MANIFEST == '' || !fileExists("manifests/${TEST_MANIFEST}")) { currentBuild.result = 'ABORTED' - error("BWC Tests failed to start. Test manifest not found in manifests/${TEST_MANIFEST}.") + error("BWC Tests failed to start. 
Test manifest was not provided or not found in manifests/${TEST_MANIFEST}.") } /* Rebuilding of this job will result in considering the upstream build as self($JOB_NAME) See https://issues.jenkins.io/browse/JENKINS-61590 for bug @@ -120,7 +120,7 @@ pipeline { icon: ':white_check_mark:', message: 'BWC Tests Successful', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) @@ -136,7 +136,7 @@ pipeline { icon: ':warning:', message: 'Failed BWC Tests', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) diff --git a/jenkins/opensearch-dashboards/distribution-build.jenkinsfile b/jenkins/opensearch-dashboards/distribution-build.jenkinsfile index 343017fb99..3529b16dba 100644 --- a/jenkins/opensearch-dashboards/distribution-build.jenkinsfile +++ b/jenkins/opensearch-dashboards/distribution-build.jenkinsfile @@ -518,7 +518,7 @@ pipeline { icon: ':white_check_mark:', message: 'Successful Build', extra: stashed, - credentialsId: 'BUILD_NOTICE_WEBHOOK', + credentialsId: 'jenkins-build-notice-webhook', manifest: "${INPUT_MANIFEST}" ) } @@ -534,7 +534,7 @@ pipeline { publishNotification( icon: ':warning:', message: 'Failed Build', - credentialsId: 'BUILD_NOTICE_WEBHOOK', + credentialsId: 'jenkins-build-notice-webhook', manifest: "${INPUT_MANIFEST}" ) } diff --git a/jenkins/opensearch-dashboards/integ-test.jenkinsfile b/jenkins/opensearch-dashboards/integ-test.jenkinsfile index 7924430b98..be59e11a32 100644 --- a/jenkins/opensearch-dashboards/integ-test.jenkinsfile +++ b/jenkins/opensearch-dashboards/integ-test.jenkinsfile @@ -36,9 +36,9 @@ pipeline { currentBuild.result = 'ABORTED' error("Integration Tests failed to start. Missing parameter: AGENT_LABEL.") } - if (!fileExists("manifests/${TEST_MANIFEST}")) { + if (TEST_MANIFEST == '' || !fileExists("manifests/${TEST_MANIFEST}")) { currentBuild.result = 'ABORTED' - error("Integration Tests failed to start. Test manifest not found in manifests/${TEST_MANIFEST}.") + error("Integration Tests failed to start. 
Test manifest was not provided or not found in manifests/${TEST_MANIFEST}.") } /* Rebuilding of this job will result in considering upstream build as self($JOB_NAME) See https://issues.jenkins.io/browse/JENKINS-61590 for bug @@ -115,7 +115,7 @@ pipeline { icon: ':white_check_mark:', message: 'Integration Tests Successful', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) @@ -131,7 +131,7 @@ pipeline { icon: ':warning:', message: 'Failed Integration Tests', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) diff --git a/jenkins/opensearch-maven-release/maven-sign-release.jenkinsfile b/jenkins/opensearch-maven-release/maven-sign-release.jenkinsfile index cc189e65ca..bd2b0210f0 100644 --- a/jenkins/opensearch-maven-release/maven-sign-release.jenkinsfile +++ b/jenkins/opensearch-maven-release/maven-sign-release.jenkinsfile @@ -22,6 +22,7 @@ pipeline { } environment { ARTIFACT_PATH = "distribution-build-opensearch/${VERSION}/${BUILD_ID}/linux/x64/tar/builds" + ARTIFACT_BUCKET_NAME = credentials('jenkins-artifact-bucket-name') } stages { stage('sign') { diff --git a/jenkins/opensearch-ruby/Jenkinsfile b/jenkins/opensearch-ruby/Jenkinsfile index b927bc4831..8112220a64 100644 --- a/jenkins/opensearch-ruby/Jenkinsfile +++ b/jenkins/opensearch-ruby/Jenkinsfile @@ -12,6 +12,9 @@ pipeline { trim: true ) } + environment { + ARTIFACT_BUCKET_NAME = credentials('jenkins-artifact-bucket-name') + } stages { stage('ruby-build-sign-upload') { agent { diff --git a/jenkins/opensearch/bwc-test.jenkinsfile b/jenkins/opensearch/bwc-test.jenkinsfile index 4bd0d2c4bf..b9a9db2c7f 100644 --- a/jenkins/opensearch/bwc-test.jenkinsfile +++ b/jenkins/opensearch/bwc-test.jenkinsfile @@ -43,9 +43,9 @@ pipeline { currentBuild.result = 'ABORTED' error("BWC Tests failed to start. Missing parameter: AGENT_LABEL.") } - if (!fileExists("manifests/${TEST_MANIFEST}")) { + if (TEST_MANIFEST == '' || !fileExists("manifests/${TEST_MANIFEST}")) { currentBuild.result = 'ABORTED' - error("BWC Tests failed to start. Test manifest not found in manifests/${TEST_MANIFEST}.") + error("BWC Tests failed to start. 
Test manifest was not provided or not found in manifests/${TEST_MANIFEST}.") } /* Rebuilding of this job will result in considering upstream build as self($JOB_NAME) See https://issues.jenkins.io/browse/JENKINS-61590 for bug @@ -123,7 +123,7 @@ pipeline { icon: ':white_check_mark:', message: 'BWC Tests Successful', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) @@ -139,7 +139,7 @@ pipeline { icon: ':warning:', message: 'Failed BWC Tests', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) diff --git a/jenkins/opensearch/distribution-build.jenkinsfile b/jenkins/opensearch/distribution-build.jenkinsfile index 546579b48e..7d98f55c3f 100644 --- a/jenkins/opensearch/distribution-build.jenkinsfile +++ b/jenkins/opensearch/distribution-build.jenkinsfile @@ -555,7 +555,7 @@ pipeline { icon: ':white_check_mark:', message: 'Successful Build', extra: stashed, - credentialsId: 'BUILD_NOTICE_WEBHOOK', + credentialsId: 'jenkins-build-notice-webhook', manifest: "${INPUT_MANIFEST}" ) } @@ -571,7 +571,7 @@ pipeline { publishNotification( icon: ':warning:', message: buildFailureMessage(), - credentialsId: 'BUILD_NOTICE_WEBHOOK', + credentialsId: 'jenkins-build-notice-webhook', manifest: "${INPUT_MANIFEST}" ) } diff --git a/jenkins/opensearch/integ-test.jenkinsfile b/jenkins/opensearch/integ-test.jenkinsfile index 986cd7b8e1..d5427f0bca 100644 --- a/jenkins/opensearch/integ-test.jenkinsfile +++ b/jenkins/opensearch/integ-test.jenkinsfile @@ -39,9 +39,9 @@ pipeline { currentBuild.result = 'ABORTED' error("Integration Tests failed to start. Missing parameter: AGENT_LABEL.") } - if (!fileExists("manifests/${TEST_MANIFEST}")) { + if (TEST_MANIFEST == '' || !fileExists("manifests/${TEST_MANIFEST}")) { currentBuild.result = 'ABORTED' - error("Integration Tests failed to start. Test manifest not found in manifests/${TEST_MANIFEST}.") + error("Integration Tests failed to start. 
Test manifest was not provided or not found in manifests/${TEST_MANIFEST}.") } /* Rebuilding of this job will result in considering upstream build as self($JOB_NAME) See https://issues.jenkins.io/browse/JENKINS-61590 for bug @@ -152,7 +152,7 @@ pipeline { icon: ':white_check_mark:', message: 'Integration Tests Successful', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) @@ -168,7 +168,7 @@ pipeline { icon: ':warning:', message: 'Failed Integration Tests', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', manifest: TEST_MANIFEST, ) diff --git a/jenkins/opensearch/perf-test.jenkinsfile b/jenkins/opensearch/perf-test.jenkinsfile index 0ca6706e8e..d381f1f0c1 100644 --- a/jenkins/opensearch/perf-test.jenkinsfile +++ b/jenkins/opensearch/perf-test.jenkinsfile @@ -194,7 +194,7 @@ pipeline { icon: ':white_check_mark:', message: 'Performance Tests Successful', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', ) postCleanup() } @@ -208,7 +208,7 @@ pipeline { icon: ':warning:', message: 'Failed Performance Tests', extra: stashed, - credentialsId: 'INTEG_TEST_WEBHOOK', + credentialsId: 'jenkins-integ-test-webhook', ) postCleanup() } diff --git a/jenkins/promotion/promote-docker-ecr.jenkinsfile b/jenkins/promotion/promote-docker-ecr.jenkinsfile new file mode 100644 index 0000000000..4e02f4cda1 --- /dev/null +++ b/jenkins/promotion/promote-docker-ecr.jenkinsfile @@ -0,0 +1,86 @@ +lib = library(identifier: 'jenkins@20211123', retriever: legacySCM(scm)) + +pipeline { + options { + timeout(time: 1, unit: 'HOURS') + } + agent { + docker { + label 'Jenkins-Agent-al2-x64-c54xlarge-Docker-Host' + image 'opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2' + args '-u root -v /var/run/docker.sock:/var/run/docker.sock' + } +} + parameters { + string( + name: 'SOURCE_IMAGES', + description: 'Comma separated list of product with its image tag that we want to promote from staging to production. E.g.: opensearch:2.0.1.3910, opensearch-dashboards:2.0.1, data-prepper:1.5.0-19.', + trim: true + ) + string( + name: 'RELEASE_VERSION', + description: 'Official release version on production repository. This is uniform for all source images. E.g.: 2.0.1, 1.5.0.', + trim: true + ) + booleanParam( + name: 'DOCKER_HUB_PROMOTE', + defaultValue: true, + description: 'Promote SOURCE_IMAGES to `opensearchproject` production Docker Hub.' + ) + booleanParam( + name: 'ECR_PROMOTE', + defaultValue: true, + description: 'Promote SOURCE_IMAGES to `gallery.ecr.aws/opensearchproject` production ECR repository.' + ) + booleanParam( + name: 'TAG_LATEST', + defaultValue: true, + description: 'Tag the copied image as latest' + ) + booleanParam( + name: 'TAG_MAJOR_VERSION', + defaultValue: true, + description: 'Tag the copied image with its major version. E.g.: 1.3.2 image will be tagged with 1 in the hub.' + ) + } + + stages { + stage('Parameters Check') { + steps { + script { + currentBuild.description = "Promoting ${SOURCE_IMAGES} to production hub." 
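For orientation only: the promotion that the promoteContainer library step performs further down in this pipeline amounts to re-tagging a staging image for the production registries. A hand-rolled equivalent for one of the example images might look like the following sketch (registry and image names are illustrative; the pipeline itself runs the library step, not these commands):

    # Illustrative only: copy opensearch:2.0.1.3910 from staging to production as 2.0.1,
    # plus the optional latest and major-version tags.
    docker pull opensearchstaging/opensearch:2.0.1.3910
    docker tag opensearchstaging/opensearch:2.0.1.3910 opensearchproject/opensearch:2.0.1    # RELEASE_VERSION
    docker tag opensearchstaging/opensearch:2.0.1.3910 opensearchproject/opensearch:latest   # TAG_LATEST
    docker tag opensearchstaging/opensearch:2.0.1.3910 opensearchproject/opensearch:2        # TAG_MAJOR_VERSION
    docker push opensearchproject/opensearch:2.0.1
    docker push opensearchproject/opensearch:latest
    docker push opensearchproject/opensearch:2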
+ if(SOURCE_IMAGES.isEmpty() || RELEASE_VERSION.isEmpty()) { + currentBuild.result = 'ABORTED' + error('Make sure at least one product is added to be promoted with proper release version.') + } + } + } + } + stage('image-promote-to-prod') { + steps { + script { + for (product in SOURCE_IMAGES.split(',')) { + def productRepo = product.trim() + println("Promoting \"$productRepo\" from staging to production.") + promoteContainer( + imageRepository: productRepo, + version: RELEASE_VERSION, + dockerPromote: DOCKER_HUB_PROMOTE, + ecrPromote: ECR_PROMOTE, + latestTag: TAG_LATEST, + majorVersionTag: TAG_MAJOR_VERSION + ) + } + } + } + } + } + post() { + always { + script { + postCleanup() + sh "docker logout" + } + } + } +} diff --git a/jenkins/sign-artifacts/sign-standalone-artifacts.jenkinsfile b/jenkins/sign-artifacts/sign-standalone-artifacts.jenkinsfile index 464373ef4a..684d944b40 100644 --- a/jenkins/sign-artifacts/sign-standalone-artifacts.jenkinsfile +++ b/jenkins/sign-artifacts/sign-standalone-artifacts.jenkinsfile @@ -20,16 +20,19 @@ pipeline { description: 'Path to upload to artifacts and signatures on s3. Eg: dummy_project/1.0' ) choice( - choices: ['linux'], + choices: ['linux', 'windows'], name: 'DISTRIBUTION_PLATFORM', description: 'What platform is this distribution build for?' ) choice( choices: ['.sig', '.rpm'], name: 'SIGNATURE_TYPE', - description: 'What is signature file type?' + description: 'What is signature file type? Required only for linux signing.' ) } + environment { + ARTIFACT_BUCKET_NAME = credentials('jenkins-artifact-bucket-name') + } stages { stage('sign') { steps { @@ -52,11 +55,13 @@ pipeline { println("Note: only supported file types will be signed") for(filename in downloadedFiles){ - if (SIGNATURE_TYPE.equals('.sig')) { + if (DISTRIBUTION_PLATFORM == 'windows') { + filenamesForUrls.add(filename) + filenamesForUrls.add('signed/' + filename) + } else if (SIGNATURE_TYPE.equals('.sig')) { filenamesForUrls.add(filename) filenamesForUrls.add(filename + SIGNATURE_TYPE) - } - else { + } else { filenamesForUrls.add(filename) } } @@ -80,7 +85,6 @@ pipeline { artifactFileNames: filenamesForUrls, uploadPath: finalUploadPath ) - } } post() { diff --git a/jenkins/vulnerability-scan/whitesource-scan.jenkinsfile b/jenkins/vulnerability-scan/whitesource-scan.jenkinsfile index ca9033c768..7904cf7b99 100644 --- a/jenkins/vulnerability-scan/whitesource-scan.jenkinsfile +++ b/jenkins/vulnerability-scan/whitesource-scan.jenkinsfile @@ -44,9 +44,9 @@ pipeline { void scan() { sh """ - curl -SLO https://services.gradle.org/distributions/gradle-6.6.1-bin.zip - unzip gradle-6.6.1-bin.zip - GRADLE_HOME=`realpath gradle-6.6.1` + curl -SLO https://services.gradle.org/distributions/gradle-7.4.2-bin.zip + unzip gradle-7.4.2-bin.zip + GRADLE_HOME=`realpath gradle-7.4.2` export PATH=\${GRADLE_HOME}/bin:\$PATH gradle --version cd ./tools/vulnerability-scan/ diff --git a/manifests/1.0.0/opensearch-1.0.0-maven.yml b/legacy-manifests/1.0.0/opensearch-1.0.0-maven.yml similarity index 100% rename from manifests/1.0.0/opensearch-1.0.0-maven.yml rename to legacy-manifests/1.0.0/opensearch-1.0.0-maven.yml diff --git a/manifests/1.0.0/opensearch-1.0.0-test.yml b/legacy-manifests/1.0.0/opensearch-1.0.0-test.yml similarity index 100% rename from manifests/1.0.0/opensearch-1.0.0-test.yml rename to legacy-manifests/1.0.0/opensearch-1.0.0-test.yml diff --git a/manifests/1.0.0/opensearch-1.0.0.yml b/legacy-manifests/1.0.0/opensearch-1.0.0.yml similarity index 100% rename from 
manifests/1.0.0/opensearch-1.0.0.yml rename to legacy-manifests/1.0.0/opensearch-1.0.0.yml diff --git a/manifests/1.0.1/opensearch-1.0.1-test.yml b/legacy-manifests/1.0.1/opensearch-1.0.1-test.yml similarity index 100% rename from manifests/1.0.1/opensearch-1.0.1-test.yml rename to legacy-manifests/1.0.1/opensearch-1.0.1-test.yml diff --git a/manifests/1.0.1/opensearch-1.0.1.yml b/legacy-manifests/1.0.1/opensearch-1.0.1.yml similarity index 100% rename from manifests/1.0.1/opensearch-1.0.1.yml rename to legacy-manifests/1.0.1/opensearch-1.0.1.yml diff --git a/manifests/1.0.1/opensearch-dashboards-1.0.1.yml b/legacy-manifests/1.0.1/opensearch-dashboards-1.0.1.yml similarity index 100% rename from manifests/1.0.1/opensearch-dashboards-1.0.1.yml rename to legacy-manifests/1.0.1/opensearch-dashboards-1.0.1.yml diff --git a/manifests/1.1.0/opensearch-1.1.0-test.yml b/legacy-manifests/1.1.0/opensearch-1.1.0-test.yml similarity index 100% rename from manifests/1.1.0/opensearch-1.1.0-test.yml rename to legacy-manifests/1.1.0/opensearch-1.1.0-test.yml diff --git a/manifests/1.1.0/opensearch-1.1.0.yml b/legacy-manifests/1.1.0/opensearch-1.1.0.yml similarity index 100% rename from manifests/1.1.0/opensearch-1.1.0.yml rename to legacy-manifests/1.1.0/opensearch-1.1.0.yml diff --git a/manifests/1.1.0/opensearch-dashboards-1.1.0-test.yml b/legacy-manifests/1.1.0/opensearch-dashboards-1.1.0-test.yml similarity index 100% rename from manifests/1.1.0/opensearch-dashboards-1.1.0-test.yml rename to legacy-manifests/1.1.0/opensearch-dashboards-1.1.0-test.yml diff --git a/manifests/1.1.0/opensearch-dashboards-1.1.0.yml b/legacy-manifests/1.1.0/opensearch-dashboards-1.1.0.yml similarity index 100% rename from manifests/1.1.0/opensearch-dashboards-1.1.0.yml rename to legacy-manifests/1.1.0/opensearch-dashboards-1.1.0.yml diff --git a/manifests/1.1.1/opensearch-1.1.1-test.yml b/legacy-manifests/1.1.1/opensearch-1.1.1-test.yml similarity index 100% rename from manifests/1.1.1/opensearch-1.1.1-test.yml rename to legacy-manifests/1.1.1/opensearch-1.1.1-test.yml diff --git a/manifests/1.1.1/opensearch-1.1.1.yml b/legacy-manifests/1.1.1/opensearch-1.1.1.yml similarity index 100% rename from manifests/1.1.1/opensearch-1.1.1.yml rename to legacy-manifests/1.1.1/opensearch-1.1.1.yml diff --git a/manifests/1.1.1/opensearch-dashboards-1.1.1-test.yml b/legacy-manifests/1.1.1/opensearch-dashboards-1.1.1-test.yml similarity index 100% rename from manifests/1.1.1/opensearch-dashboards-1.1.1-test.yml rename to legacy-manifests/1.1.1/opensearch-dashboards-1.1.1-test.yml diff --git a/manifests/1.1.1/opensearch-dashboards-1.1.1.yml b/legacy-manifests/1.1.1/opensearch-dashboards-1.1.1.yml similarity index 100% rename from manifests/1.1.1/opensearch-dashboards-1.1.1.yml rename to legacy-manifests/1.1.1/opensearch-dashboards-1.1.1.yml diff --git a/manifests/1.2.0/opensearch-1.2.0-test.yml b/legacy-manifests/1.2.0/opensearch-1.2.0-test.yml similarity index 100% rename from manifests/1.2.0/opensearch-1.2.0-test.yml rename to legacy-manifests/1.2.0/opensearch-1.2.0-test.yml diff --git a/manifests/1.2.0/opensearch-1.2.0.yml b/legacy-manifests/1.2.0/opensearch-1.2.0.yml similarity index 100% rename from manifests/1.2.0/opensearch-1.2.0.yml rename to legacy-manifests/1.2.0/opensearch-1.2.0.yml diff --git a/manifests/1.2.0/opensearch-dashboards-1.2.0-test.yml b/legacy-manifests/1.2.0/opensearch-dashboards-1.2.0-test.yml similarity index 100% rename from manifests/1.2.0/opensearch-dashboards-1.2.0-test.yml rename to 
legacy-manifests/1.2.0/opensearch-dashboards-1.2.0-test.yml diff --git a/manifests/1.2.0/opensearch-dashboards-1.2.0.yml b/legacy-manifests/1.2.0/opensearch-dashboards-1.2.0.yml similarity index 100% rename from manifests/1.2.0/opensearch-dashboards-1.2.0.yml rename to legacy-manifests/1.2.0/opensearch-dashboards-1.2.0.yml diff --git a/manifests/1.2.1/opensearch-1.2.1-test.yml b/legacy-manifests/1.2.1/opensearch-1.2.1-test.yml similarity index 100% rename from manifests/1.2.1/opensearch-1.2.1-test.yml rename to legacy-manifests/1.2.1/opensearch-1.2.1-test.yml diff --git a/manifests/1.2.1/opensearch-1.2.1.yml b/legacy-manifests/1.2.1/opensearch-1.2.1.yml similarity index 100% rename from manifests/1.2.1/opensearch-1.2.1.yml rename to legacy-manifests/1.2.1/opensearch-1.2.1.yml diff --git a/manifests/1.2.1/opensearch-dashboards-1.2.1.yml b/legacy-manifests/1.2.1/opensearch-dashboards-1.2.1.yml similarity index 100% rename from manifests/1.2.1/opensearch-dashboards-1.2.1.yml rename to legacy-manifests/1.2.1/opensearch-dashboards-1.2.1.yml diff --git a/manifests/1.2.2/opensearch-1.2.2-test.yml b/legacy-manifests/1.2.2/opensearch-1.2.2-test.yml similarity index 100% rename from manifests/1.2.2/opensearch-1.2.2-test.yml rename to legacy-manifests/1.2.2/opensearch-1.2.2-test.yml diff --git a/manifests/1.2.2/opensearch-1.2.2.yml b/legacy-manifests/1.2.2/opensearch-1.2.2.yml similarity index 100% rename from manifests/1.2.2/opensearch-1.2.2.yml rename to legacy-manifests/1.2.2/opensearch-1.2.2.yml diff --git a/manifests/1.2.3/opensearch-1.2.3-test.yml b/legacy-manifests/1.2.3/opensearch-1.2.3-test.yml similarity index 100% rename from manifests/1.2.3/opensearch-1.2.3-test.yml rename to legacy-manifests/1.2.3/opensearch-1.2.3-test.yml diff --git a/manifests/1.2.3/opensearch-1.2.3.yml b/legacy-manifests/1.2.3/opensearch-1.2.3.yml similarity index 100% rename from manifests/1.2.3/opensearch-1.2.3.yml rename to legacy-manifests/1.2.3/opensearch-1.2.3.yml diff --git a/manifests/1.2.4/opensearch-1.2.4-test.yml b/legacy-manifests/1.2.4/opensearch-1.2.4-test.yml similarity index 100% rename from manifests/1.2.4/opensearch-1.2.4-test.yml rename to legacy-manifests/1.2.4/opensearch-1.2.4-test.yml diff --git a/manifests/1.2.4/opensearch-1.2.4.yml b/legacy-manifests/1.2.4/opensearch-1.2.4.yml similarity index 100% rename from manifests/1.2.4/opensearch-1.2.4.yml rename to legacy-manifests/1.2.4/opensearch-1.2.4.yml diff --git a/manifests/1.2.5/opensearch-1.2.5.yml b/legacy-manifests/1.2.5/opensearch-1.2.5.yml similarity index 100% rename from manifests/1.2.5/opensearch-1.2.5.yml rename to legacy-manifests/1.2.5/opensearch-1.2.5.yml diff --git a/manifests/1.3.4/opensearch-1.3.4-test.yml b/manifests/1.3.4/opensearch-1.3.4-test.yml new file mode 100644 index 0000000000..f50fd7fc1f --- /dev/null +++ b/manifests/1.3.4/opensearch-1.3.4-test.yml @@ -0,0 +1,71 @@ +--- +schema-version: '1.0' +name: OpenSearch +ci: + image: + name: opensearchstaging/ci-runner:ci-runner-centos7-opensearch-build-v2 + args: -e JAVA_HOME=/opt/java/openjdk-11 +components: + - name: index-management + integ-test: + build-dependencies: + - job-scheduler + test-configs: + - with-security + - without-security + additional-cluster-configs: + path.repo: [/tmp] + + - name: anomaly-detection + integ-test: + build-dependencies: + - job-scheduler + test-configs: + - with-security + - without-security + + - name: asynchronous-search + integ-test: + test-configs: + - with-security + - without-security + + - name: alerting + integ-test: + test-configs: + 
- with-security + - without-security + additional-cluster-configs: + plugins.destination.host.deny_list: [10.0.0.0/8, 127.0.0.1] + + - name: sql + integ-test: + test-configs: + - with-security + - without-security + additional-cluster-configs: + script.context.field.max_compilations_rate: 1000/1m + + - name: k-NN + integ-test: + test-configs: + - with-security + - without-security + + - name: dashboards-reports + working-directory: reports-scheduler + integ-test: + test-configs: + - without-security + + - name: observability + working-directory: opensearch-observability + integ-test: + test-configs: + - without-security + + - name: ml-commons + integ-test: + test-configs: + - with-security + - without-security diff --git a/manifests/1.3.4/opensearch-1.3.4.yml b/manifests/1.3.4/opensearch-1.3.4.yml index 9b7b550c63..a475d2dc49 100644 --- a/manifests/1.3.4/opensearch-1.3.4.yml +++ b/manifests/1.3.4/opensearch-1.3.4.yml @@ -6,6 +6,7 @@ build: ci: image: name: opensearchstaging/ci-runner:ci-runner-centos7-opensearch-build-v2 + args: -e JAVA_HOME=/opt/java/openjdk-11 components: - name: OpenSearch repository: https://github.com/opensearch-project/OpenSearch.git @@ -13,3 +14,95 @@ components: checks: - gradle:publish - gradle:properties:version + - name: common-utils + repository: https://github.com/opensearch-project/common-utils.git + ref: '1.3' + checks: + - gradle:publish + - gradle:properties:version + - name: job-scheduler + repository: https://github.com/opensearch-project/job-scheduler.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: ml-commons + repository: https://github.com/opensearch-project/ml-commons.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version: opensearch-ml-plugin + - name: security + repository: https://github.com/opensearch-project/security.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: k-NN + repository: https://github.com/opensearch-project/k-NN.git + ref: '1.3' + platforms: + - darwin + - linux + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: asynchronous-search + repository: https://github.com/opensearch-project/asynchronous-search.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: performance-analyzer + repository: https://github.com/opensearch-project/performance-analyzer.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + platforms: + - darwin + - linux + - name: alerting + repository: https://github.com/opensearch-project/alerting.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version: alerting + - name: anomaly-detection + repository: https://github.com/opensearch-project/anomaly-detection.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: index-management + repository: https://github.com/opensearch-project/index-management.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: cross-cluster-replication + repository: https://github.com/opensearch-project/cross-cluster-replication.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: dashboards-reports + repository: 
https://github.com/opensearch-project/dashboards-reports.git + ref: '1.3' + working_directory: reports-scheduler + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: opensearch-observability + repository: https://github.com/opensearch-project/observability + ref: '1.3' + working_directory: opensearch-observability + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version + - name: sql + repository: https://github.com/opensearch-project/sql.git + ref: '1.3' + checks: + - gradle:properties:version + - gradle:dependencies:opensearch.version: plugin diff --git a/manifests/1.3.4/opensearch-dashboards-1.3.4-test.yml b/manifests/1.3.4/opensearch-dashboards-1.3.4-test.yml new file mode 100644 index 0000000000..3ad90cc802 --- /dev/null +++ b/manifests/1.3.4/opensearch-dashboards-1.3.4-test.yml @@ -0,0 +1,17 @@ +--- +schema-version: '1.0' +name: OpenSearch Dashboards +ci: + image: + name: opensearchstaging/ci-runner:ci-runner-rockylinux8-opensearch-dashboards-integtest-v2 +components: + - name: OpenSearch-Dashboards + bwc-test: + test-configs: + - with-security + - without-security + - name: functionalTestDashboards + integ-test: + test-configs: + - with-security + - without-security diff --git a/manifests/1.3.4/opensearch-dashboards-1.3.4.yml b/manifests/1.3.4/opensearch-dashboards-1.3.4.yml index 58d1f700aa..d77a3e207b 100644 --- a/manifests/1.3.4/opensearch-dashboards-1.3.4.yml +++ b/manifests/1.3.4/opensearch-dashboards-1.3.4.yml @@ -10,3 +10,34 @@ components: - name: OpenSearch-Dashboards repository: https://github.com/opensearch-project/OpenSearch-Dashboards.git ref: '1.3' + - name: functionalTestDashboards + repository: https://github.com/opensearch-project/opensearch-dashboards-functional-test.git + ref: '1.3' + - name: securityDashboards + repository: https://github.com/opensearch-project/security-dashboards-plugin.git + ref: '1.3' + - name: anomalyDetectionDashboards + repository: https://github.com/opensearch-project/anomaly-detection-dashboards-plugin + ref: '1.3' + - name: indexManagementDashboards + repository: https://github.com/opensearch-project/index-management-dashboards-plugin + ref: '1.3' + - name: queryWorkbenchDashboards + repository: https://github.com/opensearch-project/sql.git + working_directory: workbench + ref: '1.3' + - name: reportsDashboards + repository: https://github.com/opensearch-project/dashboards-reports.git + working_directory: dashboards-reports + ref: '1.3' + - name: ganttChartDashboards + repository: https://github.com/opensearch-project/dashboards-visualizations.git + working_directory: gantt-chart + ref: '1.3' + - name: observabilityDashboards + repository: https://github.com/opensearch-project/observability.git + working_directory: dashboards-observability + ref: '1.3' + - name: alertingDashboards + repository: https://github.com/opensearch-project/alerting-dashboards-plugin.git + ref: '1.3' diff --git a/manifests/2.1.0/opensearch-2.1.0.yml b/manifests/2.1.0/opensearch-2.1.0.yml index 142353d7d1..39535db408 100644 --- a/manifests/2.1.0/opensearch-2.1.0.yml +++ b/manifests/2.1.0/opensearch-2.1.0.yml @@ -10,85 +10,85 @@ ci: components: - name: OpenSearch repository: https://github.com/opensearch-project/OpenSearch.git - ref: '2.1' + ref: tags/2.1.0 checks: - gradle:publish - gradle:properties:version - name: common-utils repository: https://github.com/opensearch-project/common-utils.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - 
gradle:publish - gradle:properties:version - name: job-scheduler repository: https://github.com/opensearch-project/job-scheduler.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version - name: ml-commons repository: https://github.com/opensearch-project/ml-commons.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version: opensearch-ml-plugin - name: performance-analyzer repository: https://github.com/opensearch-project/performance-analyzer.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version - name: security repository: https://github.com/opensearch-project/security.git - ref: '2.1' + ref: tags/2.1.0.0 - name: notifications-core repository: https://github.com/opensearch-project/notifications.git - ref: '2.1' + ref: tags/2.1.0.0 working_directory: notifications checks: - gradle:properties:version - gradle:dependencies:opensearch.version: opensearch-notifications-core - name: notifications repository: https://github.com/opensearch-project/notifications.git - ref: '2.1' + ref: tags/2.1.0.0 working_directory: notifications checks: - gradle:properties:version - gradle:dependencies:opensearch.version: notifications - name: index-management repository: https://github.com/opensearch-project/index-management.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - name: sql repository: https://github.com/opensearch-project/sql.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - - gradle:dependencies:opensearch.version: plugin + - gradle:dependencies:opensearch.version: opensearch-sql-plugin - name: opensearch-observability repository: https://github.com/opensearch-project/observability - ref: '2.1' + ref: tags/2.1.0.0 working_directory: opensearch-observability checks: - gradle:properties:version - gradle:dependencies:opensearch.version - name: dashboards-reports repository: https://github.com/opensearch-project/dashboards-reports.git - ref: '2.1' + ref: tags/2.1.0.0 working_directory: reports-scheduler checks: - gradle:properties:version - gradle:dependencies:opensearch.version - name: alerting repository: https://github.com/opensearch-project/alerting.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version: alerting - name: k-NN repository: https://github.com/opensearch-project/k-NN.git - ref: '2.1' + ref: tags/2.1.0.0 platforms: - darwin - linux @@ -97,19 +97,19 @@ components: - gradle:dependencies:opensearch.version - name: anomaly-detection repository: https://github.com/opensearch-project/anomaly-detection.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version - name: asynchronous-search repository: https://github.com/opensearch-project/asynchronous-search.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version - name: cross-cluster-replication repository: https://github.com/opensearch-project/cross-cluster-replication.git - ref: '2.1' + ref: tags/2.1.0.0 checks: - gradle:properties:version - gradle:dependencies:opensearch.version diff --git a/manifests/2.1.0/opensearch-dashboards-2.1.0.yml b/manifests/2.1.0/opensearch-dashboards-2.1.0.yml index a4b39a2b00..8cb61dd2c6 100644 --- a/manifests/2.1.0/opensearch-dashboards-2.1.0.yml +++ 
b/manifests/2.1.0/opensearch-dashboards-2.1.0.yml @@ -9,36 +9,41 @@ ci: components: - name: OpenSearch-Dashboards repository: https://github.com/opensearch-project/OpenSearch-Dashboards.git - ref: 2.x + ref: tags/2.1.0 + checks: + - npm:package:version + - name: functionalTestDashboards + repository: https://github.com/opensearch-project/opensearch-dashboards-functional-test.git + ref: tags/2.1.0 - name: notificationsDashboards repository: https://github.com/opensearch-project/notifications.git working_directory: dashboards-notifications - ref: main + ref: tags/2.1.0.0 - name: queryWorkbenchDashboards repository: https://github.com/opensearch-project/sql.git working_directory: workbench - ref: main + ref: tags/2.1.0.0 - name: observabilityDashboards repository: https://github.com/opensearch-project/observability.git working_directory: dashboards-observability - ref: main + ref: tags/2.1.0.0 - name: ganttChartDashboards repository: https://github.com/opensearch-project/dashboards-visualizations.git working_directory: gantt-chart - ref: main + ref: tags/2.1.0.0 - name: reportsDashboards repository: https://github.com/opensearch-project/dashboards-reports.git working_directory: dashboards-reports - ref: main + ref: tags/2.1.0.0 - name: securityDashboards repository: https://github.com/opensearch-project/security-dashboards-plugin.git - ref: main + ref: tags/2.1.0.0 - name: alertingDashboards repository: https://github.com/opensearch-project/alerting-dashboards-plugin.git - ref: main + ref: tags/2.1.0.0 - name: anomalyDetectionDashboards repository: https://github.com/opensearch-project/anomaly-detection-dashboards-plugin - ref: main + ref: tags/2.1.0.0 - name: indexManagementDashboards repository: https://github.com/opensearch-project/index-management-dashboards-plugin.git - ref: main + ref: tags/2.1.0.0 diff --git a/manifests/2.1.1/opensearch-2.1.1.yml b/manifests/2.1.1/opensearch-2.1.1.yml new file mode 100644 index 0000000000..890d6456d7 --- /dev/null +++ b/manifests/2.1.1/opensearch-2.1.1.yml @@ -0,0 +1,15 @@ +--- +schema-version: '1.0' +build: + name: OpenSearch + version: 2.1.1 +ci: + image: + name: opensearchstaging/ci-runner:ci-runner-centos7-opensearch-build-v2 +components: + - name: OpenSearch + repository: https://github.com/opensearch-project/OpenSearch.git + ref: '2.1' + checks: + - gradle:publish + - gradle:properties:version diff --git a/manifests/2.2.0/opensearch-2.2.0.yml b/manifests/2.2.0/opensearch-2.2.0.yml index f5383b5af7..cbcfea920a 100644 --- a/manifests/2.2.0/opensearch-2.2.0.yml +++ b/manifests/2.2.0/opensearch-2.2.0.yml @@ -6,6 +6,7 @@ build: ci: image: name: opensearchstaging/ci-runner:ci-runner-centos7-opensearch-build-v2 + args: -e JAVA_HOME=/opt/java/openjdk-17 components: - name: OpenSearch repository: https://github.com/opensearch-project/OpenSearch.git diff --git a/release-notes/opensearch-release-notes-2.1.0.md b/release-notes/opensearch-release-notes-2.1.0.md new file mode 100644 index 0000000000..a3b8d9e8be --- /dev/null +++ b/release-notes/opensearch-release-notes-2.1.0.md @@ -0,0 +1,245 @@ +# OpenSearch and Dashboards 2.1.0 Release Notes + +## Release Highlights + + +* OpenSearch 2.1.0 supports version spoofing against 2.0.0 where it was [removed](https://github.com/opensearch-project/opensearch/pull/3530). 
+* You can now configure nodes with [dynamic nodes roles](https://github.com/opensearch-project/OpenSearch/pull/3436), which allows for custom node roles that won't affect node start processes. +* The [ML node role](https://github.com/opensearch-project/ml-commons/pull/346) can be configured for ML functions and tasks. +* SQL and PPL queries now support [relevance-based search](https://github.com/opensearch-project/sql/issues/182), including [match_function](https://github.com/opensearch-project/sql/pull/204), [match_phrase](https://github.com/opensearch-project/sql/pull/604), and [match_bool_prefix](https://github.com/opensearch-project/sql/pull/634). +* OpenSearch now supports [multi-term aggregation](https://github.com/opensearch-project/OpenSearch/pull/2687). + +## Release Details + +OpenSearch and OpenSearch Dashboards 2.1.0 includes the following features, enhancements, bug fixes, infrastructure, documentation, maintenance, and refactoring updates. + +OpenSearch [Release Notes](https://github.com/opensearch-project/OpenSearch/blob/main/release-notes/opensearch.release-notes-2.1.0.md). + +OpenSearch Dashboards [Release Notes](https://github.com/opensearch-project/OpenSearch-Dashboards/blob/main/release-notes/opensearch-dashboards.release-notes-2.1.0.md). + +## FEATURES + +### Opensearch Index Management +* Merge snapshot management into main branch ([#390](https://github.com/opensearch-project/index-management/pull/390)) +* Adds snapshot management notification implementation ([#387](https://github.com/opensearch-project/index-management/pull/387)) +* Snapshot management default date format in snapshot name ([#392](https://github.com/opensearch-project/index-management/pull/392)) + + +### Opensearch Index Management Dashboards Plugin +* Merge snapshot management into main branch ([#205](https://github.com/opensearch-project/index-management-dashboards-plugin/pull/205)) + + +### Opensearch Ml Common +* Dispatch ML task to ML node first ([#346](https://github.com/opensearch-project/ml-commons/pull/346)) + + +### Opensearch Performance Analyzer +* Thread Metrics RCA ([#180](https://github.com/opensearch-project/performance-analyzer/pull/180)) + + +### Opensearch SQL +* Support match_phrase filter function in SQL and PPL ([#604](https://github.com/opensearch-project/sql/pull/604)) +* Add implementation for `simple_query_string` relevance search function in SQL and PPL ([#635](https://github.com/opensearch-project/sql/pull/635)) +* Add multi_match to SQL plugin ([#649](https://github.com/opensearch-project/sql/pull/649)) +* Integ match bool prefix #187 ([#634](https://github.com/opensearch-project/sql/pull/634)) +* PPL describe command ([#646](https://github.com/opensearch-project/sql/pull/646)) + + +## ENHANCEMENT + +### Opensearch Index Management Dashboards Plugin +* Snapshot management small fixes ([#208](https://github.com/opensearch-project/index-management-dashboards-plugin/pull/208)) +* Tune the column width, fix the problem of showing snapshot failures ([#210](https://github.com/opensearch-project/index-management-dashboards-plugin/pull/210)) + + +### Opensearch Security Dashboards Plugin +* Dynamically compute OpenID redirectUri from proxy HTTP headers ([#929](https://github.com/opensearch-project/security-dashboards-plugin/pull/929)) +* Clear the sessionStorage when logging out ([#1003](https://github.com/opensearch-project/security-dashboards-plugin/pull/1003)) + +## BUG FIX + 
+### Opensearch Security +* Cluster permissions evaluation logic will now include `index_template` type action ([#1885](https://github.com/opensearch-project/security/pull/1885)) +* Add missing settings to plugin allowed list ([#1814](https://github.com/opensearch-project/security/pull/1814)) +* Updates license headers ([#1829](https://github.com/opensearch-project/security/pull/1829)) +* Prevent recursive action groups ([#1868](https://github.com/opensearch-project/security/pull/1868)) +* Update `org.springframework:spring-core` to `5.3.20` ([#1850](https://github.com/opensearch-project/security/pull/1850)) + + +### Opensearch Security Dashboards Plugin +* Disable private tenant for read only users ([#868](https://github.com/opensearch-project/security-dashboards-plugin/pull/868)) +* Replace _opendistro route with _plugins ([#895](https://github.com/opensearch-project/security-dashboards-plugin/pull/895)) + +### Opensearch SQL +* Integ replace junit assertthat with hamcrest import ([#616](https://github.com/opensearch-project/sql/pull/616)) +* Integ relevance function it fix ([#608](https://github.com/opensearch-project/sql/pull/608)) +* Fix merge conflict on function name ([#664](https://github.com/opensearch-project/sql/pull/664)) +* Fix `fuzziness` parsing in `multi_match` function. Update tests. ([#668](https://github.com/opensearch-project/sql/pull/668)) +* ODBC SSL Compliance Fix ([#653](https://github.com/opensearch-project/sql/pull/653)) + + +## INFRASTRUCTURE + +### Opensearch Anomaly Detection +* Cluster manager revert fix ([#584](https://github.com/opensearch-project/anomaly-detection/pull/584)) +* Adding HCAD data ingestion script to AD ([#585](https://github.com/opensearch-project/anomaly-detection/pull/585)) +* Update ingestion ([#592](https://github.com/opensearch-project/anomaly-detection/pull/592)) +* Adding custom plugin to publish zip to maven ([#594](https://github.com/opensearch-project/anomaly-detection/pull/594)) + + +### Opensearch Anomaly Detection Dashboards +* Added UT for validation API related components ([#252](https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/pull/252)) +* Run UT/IT on all branches ([#228](https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/pull/228)) + + +### Opensearch Index Management +* Uses custom plugin to publish zips to maven ([#366](https://github.com/opensearch-project/index-management/pull/366)) + + +### Opensearch k-NN + +* Update opensearch version in BWCWorkflow ([#402](https://github.com/opensearch-project/k-NN/pull/402)) +* Adding workflow for creating documentation issues ([#403](https://github.com/opensearch-project/k-NN/pull/403)) +* Add Querying Functionality to OSB ([#409](https://github.com/opensearch-project/k-NN/pull/409)) +* Add OpenSearch Benchmark index workload for k-NN ([#364](https://github.com/opensearch-project/k-NN/pull/364)) +* Set tests.security.manager flag to false in integTestRemote task +([#410](https://github.com/opensearch-project/k-NN/pull/410)) + + +### Opensearch Ml Common +* Bump RCF version to 3.0-rc3 ([#340](https://github.com/opensearch-project/ml-commons/pull/340)) + + +### Opensearch Observability +* Uses custom plugin to publish zips to maven ([#786](https://github.com/opensearch-project/observability/pull/786)) + + +### Opensearch SQL +* Match Query Unit Tests ([#614](https://github.com/opensearch-project/sql/pull/614)) +* Uses custom 
plugin to publish zips to maven ([#638](https://github.com/opensearch-project/sql/pull/638)) + + +## DOCUMENTATION + +### Opensearch Alerting +* Added 2.1 release notes. ([#485](https://github.com/opensearch-project/alerting/pull/485)) + + +### Opensearch Alerting Dashboards Plugin +* Added 2.1 release notes. ([#284](https://github.com/opensearch-project/alerting-dashboards-plugin/pull/284)) + + +### Opensearch Common Utils +* Added 2.1 release notes. ([#194](https://github.com/opensearch-project/common-utils/pull/194)) + + +### Opensearch Index Management +* Updated issue templates from .github. ([#324](https://github.com/opensearch-project/index-management/pull/324)) + + +### Opensearch Index Management Dashboards Plugin +* Adding workflow for creating documentation issues. ([#197](https://github.com/opensearch-project/index-management-dashboards-plugin/pull/197)) + + +### OpenSearch Job Scheduler +* Added 2.1 release notes. ([#198](https://github.com/opensearch-project/job-scheduler/pull/198)) + + +## MAINTENANCE + +### Opensearch Anomaly Detection +* 2.1 version bump and Gradle bump ([#582](https://github.com/opensearch-project/anomaly-detection/pull/582)) + + +### Opensearch Anomaly Detection Dashboards +* Bump to 2.1.0 compatibility ([#282](https://github.com/opensearch-project/anomaly-detection-dashboards-plugin/pull/282)) + +### Opensearch Alerting +* Bumped version to 2.1.0, and gradle to 7.4.2. ([#475](https://github.com/opensearch-project/alerting/pull/475)) + + +### Opensearch Alerting Dashboards Plugin +* Bumped version from 2.0.1 to 2.1.0. ([#277](https://github.com/opensearch-project/alerting-dashboards-plugin/pull/277)) +* Bumped OpenSearch-Dashboards branch used by the unit-tests-workflow. ([#278](https://github.com/opensearch-project/alerting-dashboards-plugin/pull/278)) + + +### Opensearch Common Utils +* Upgrade gradle to 7.4.2. ([#191](https://github.com/opensearch-project/common-utils/pull/191)) +* Bump up the version to 2.1. ([#190](https://github.com/opensearch-project/common-utils/pull/190)) + + +### Opensearch Dashboards Visualizations +* Version bump to 2.1.0 ([#89](https://github.com/opensearch-project/dashboards-visualizations/pull/89)) + + +### Opensearch Index Management +* Version upgrade to 2.1.0 ([#389](https://github.com/opensearch-project/index-management/pull/389)) + + +### Opensearch k-NN +* Bumping main version to opensearch core 2.1.0 ([#411](https://github.com/opensearch-project/k-NN/pull/411)) + + +### Opensearch Index Management Dashboards Plugin +* Version bump 2.1.0 ([#206](https://github.com/opensearch-project/index-management-dashboards-plugin/pull/206)) + + +### OpenSearch Job Scheduler +* Bump up the OS version to 2.1. 
([#195](https://github.com/opensearch-project/job-scheduler/pull/195)) + + +### Opensearch Notifications +* Upgrade Notifications and Notifications Dashboards to 2.1 ([#468](https://github.com/opensearch-project/notifications/pull/468)) +* Fix Email test for security integration test ([#462](https://github.com/opensearch-project/notifications/pull/462)) + + +### Opensearch Observability +* Bump version to 2.1.0 and gradle version to 7.4.2 ([#817](https://github.com/opensearch-project/observability/pull/817)) + + +### Opensearch Performance Analyzer +* Update 2.1 release version ([#192](https://github.com/opensearch-project/performance-analyzer-rca/pull/192)) +* Update 2.1 release version ([#232](https://github.com/opensearch-project/performance-analyzer/pull/232)) + + +### Opensearch Security +* Revert "Bump version to 2.1.0.0 (#1865)" ([#1882](https://github.com/opensearch-project/security/pull/1882)) +* Bump version to 2.1.0.0 ([#1865](https://github.com/opensearch-project/security/pull/1865)) +* Revert "Bump version to 2.1.0.0 (#1855)" ([#1864](https://github.com/opensearch-project/security/pull/1864)) +* Bump version to 2.1.0.0 ([#1855](https://github.com/opensearch-project/security/pull/1855)) +* Add suppression for all removal warnings ([#1828](https://github.com/opensearch-project/security/pull/1828)) +* Update support link ([#1851](https://github.com/opensearch-project/security/pull/1851)) +* Create 2.0.0 release notes ([#1854](https://github.com/opensearch-project/security/pull/1854)) +* Switch to standard OpenSearch gradle build ([#1888](https://github.com/opensearch-project/security/pull/1888)) +* Fix build break from cluster manager changes ([#1911](https://github.com/opensearch-project/security/pull/1911)) +* Update org.apache.zookeeper:zookeeper to 3.7.1 ([#1912](https://github.com/opensearch-project/security/pull/1912)) + + +### Opensearch Security Dashboards Plugin +* Bump version to 2.1.0.0 ([#1004](https://github.com/opensearch-project/security-dashboards-plugin/pull/1004)) +* Adds 1.3.1.0 release notes ([#988](https://github.com/opensearch-project/security-dashboards-plugin/pull/988)) +* Create release notes 2.0.0 ([#996](https://github.com/opensearch-project/security-dashboards-plugin/pull/996)) + + +### Opensearch SQL +* Change plugin folder name to opensearch-sql-plugin ([#670](https://github.com/opensearch-project/sql/pull/670)) +* Version bump to 2.1.0 and gradle version bump ([#655](https://github.com/opensearch-project/sql/pull/655)) + + +## REFACTORING + +### Opensearch k-NN +* Adding support for Lombok ([#393](https://github.com/opensearch-project/k-NN/pull/393)) + + +### Opensearch Observability +* Make common delete modal for components ([#766](https://github.com/opensearch-project/observability/pull/766)) +* Sync app and app list types ([#763](https://github.com/opensearch-project/observability/pull/763)) + + +### Opensearch Security +* Remove master keywords ([#1886](https://github.com/opensearch-project/security/pull/1886)) + + diff --git a/scripts/gradle/gradle-check.sh b/scripts/gradle/gradle-check.sh new file mode 100644 index 0000000000..2d0dc522c5 --- /dev/null +++ b/scripts/gradle/gradle-check.sh @@ -0,0 +1,69 @@ +#!/bin/bash + + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+ +# This script is used in OpenSearch Core repo github actions +# To trigger Jenkins Gradle Check from a PR + + +JENKINS_URL="https://build.ci.opensearch.org" +TIMEPASS=0 +TIMEOUT=7200 +RESULT="null" +TRIGGER_TOKEN=$1 +PAYLOAD_JSON="{\"pr_from_sha\": \"$pr_from_sha\", \"pr_from_clone_url\": \"$pr_from_clone_url\", \"pr_to_clone_url\": \"$pr_to_clone_url\", \"pr_title\": \"$pr_title\", \"pr_number\": \"$pr_number\"}" + +echo "Trigger Jenkins workflows" +JENKINS_REQ=`curl -s -XPOST \ + -H "Authorization: Bearer $TRIGGER_TOKEN" \ + -H "Content-Type: application/json" \ + "$JENKINS_URL/generic-webhook-trigger/invoke" \ + --data "$(echo $PAYLOAD_JSON)"` + +echo $PAYLOAD_JSON | jq +echo $JENKINS_REQ + +QUEUE_URL=$(echo $JENKINS_REQ | jq --raw-output '.jobs."gradle-check".url') +echo QUEUE_URL $QUEUE_URL +echo "wait for jenkins to start workflow" && sleep 15 + +echo "Check if queue exist in Jenkins after triggering" +if [ -z "$QUEUE_URL" ] || [ "$QUEUE_URL" != "null" ]; then + WORKFLOW_URL=$(curl -s -XGET ${JENKINS_URL}/${QUEUE_URL}api/json | jq --raw-output .executable.url) + echo WORKFLOW_URL $WORKFLOW_URL + + echo "Use queue information to find build number in Jenkins if available" + if [ -z "$WORKFLOW_URL" ] || [ "$WORKFLOW_URL" != "null" ]; then + + RUNNING="true" + + echo "Waiting for Jenkins to complete the run" + while [ "$RUNNING" = "true" ] && [ "$TIMEPASS" -le "$TIMEOUT" ]; do + echo "Still running, wait for another 30 seconds before checking again, max timeout $TIMEOUT" + echo "Jenkins Workflow Url: $WORKFLOW_URL" + TIMEPASS=$(( TIMEPASS + 30 )) && echo time pass: $TIMEPASS + sleep 30 + RUNNING=$(curl -s -XGET ${WORKFLOW_URL}api/json | jq --raw-output .building) + done + + echo "Complete the run, checking results now......" + RESULT=$(curl -s -XGET ${WORKFLOW_URL}api/json | jq --raw-output .result) + + fi +fi + +echo "Please check jenkins url for logs: $WORKFLOW_URL" + +if [ "$RESULT" != "SUCCESS" ]; then + echo "Result: $RESULT" + exit 1 +else + echo "Result: $RESULT" + echo "Get codeCoverage.xml" && curl -SLO ${WORKFLOW_URL}artifact/codeCoverage.xml + echo 0 +fi diff --git a/src/run_sign.py b/src/run_sign.py index e9716c4d7a..89edc6b4e9 100755 --- a/src/run_sign.py +++ b/src/run_sign.py @@ -10,7 +10,6 @@ from sign_workflow.sign_args import SignArgs from sign_workflow.sign_artifacts import SignArtifacts -from sign_workflow.signer import Signer from system import console @@ -24,7 +23,7 @@ def main() -> int: components=args.components, artifact_type=args.type, signature_type=args.sigtype, - signer=Signer() + platform=args.platform ) sign.sign() diff --git a/src/sign_workflow/sign_args.py b/src/sign_workflow/sign_args.py index 742a1e0408..cefa5155f6 100644 --- a/src/sign_workflow/sign_args.py +++ b/src/sign_workflow/sign_args.py @@ -11,7 +11,8 @@ class SignArgs: - ACCEPTED_SIGNATURE_FILE_TYPES = [".sig"] + ACCEPTED_SIGNATURE_FILE_TYPES = [".sig", ".asc"] + ACCEPTED_PLATFORM = ["linux", "windows"] target: Path components: List[str] @@ -25,7 +26,7 @@ def __init__(self) -> None: parser.add_argument("-c", "--component", type=str, nargs='*', dest="components", help="Component or components to sign") parser.add_argument("--type", help="Artifact type") parser.add_argument("--sigtype", choices=self.ACCEPTED_SIGNATURE_FILE_TYPES, help="Type of signature file.", default=".asc") - parser.add_argument("--platform", nargs="?", help="Distribution platform.", default="linux") + parser.add_argument("--platform", choices=self.ACCEPTED_PLATFORM, help="Distribution platform.", default="linux") 
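For context, a hedged example of exercising the tightened signing arguments from the command line; the manifest path below is illustrative and assumes the workflow's existing positional target argument:

    # --platform is now restricted to linux|windows, and .asc joins .sig as an accepted --sigtype.
    python src/run_sign.py builds/opensearch/manifest.yml --platform linux --sigtype .asc -v
    python src/run_sign.py builds/opensearch/manifest.yml --platform windows -v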
parser.add_argument( "-v", "--verbose", diff --git a/src/sign_workflow/sign_artifacts.py b/src/sign_workflow/sign_artifacts.py index 7355926d8b..3dbe3ee1c8 100644 --- a/src/sign_workflow/sign_artifacts.py +++ b/src/sign_workflow/sign_artifacts.py @@ -14,6 +14,7 @@ from manifests.build_manifest import BuildManifest from sign_workflow.signer import Signer +from sign_workflow.signers import Signers class SignArtifacts: @@ -21,14 +22,16 @@ class SignArtifacts: component: str artifact_type: str signature_type: str + platform: str signer: Signer - def __init__(self, target: Path, components: List[str], artifact_type: str, signature_type: str, signer: Signer) -> None: + def __init__(self, target: Path, components: List[str], artifact_type: str, signature_type: str, platform: str) -> None: self.target = target self.components = components self.artifact_type = artifact_type self.signature_type = signature_type - self.signer = signer + self.platform = platform + self.signer = Signers.create(platform) @abstractmethod def __sign__(self) -> None: @@ -54,9 +57,9 @@ def __signer_class__(self, path: Path) -> Type[Any]: return SignArtifactsExistingArtifactFile @classmethod - def from_path(self, path: Path, components: List[str], artifact_type: str, signature_type: str, signer: Signer) -> Any: + def from_path(self, path: Path, components: List[str], artifact_type: str, signature_type: str, platform: str) -> Any: klass = self.__signer_class__(path) - return klass(path, components, artifact_type, signature_type, signer) + return klass(path, components, artifact_type, signature_type, platform) class SignWithBuildManifest(SignArtifacts): diff --git a/src/sign_workflow/signer.py b/src/sign_workflow/signer.py index ab9f6e06f0..f68badb5fe 100644 --- a/src/sign_workflow/signer.py +++ b/src/sign_workflow/signer.py @@ -8,22 +8,16 @@ import logging import os +from abc import ABC, abstractmethod from pathlib import Path from typing import List from git.git_repository import GitRepository -""" -This class is responsible for signing an artifact using the OpenSearch-signer-client and verifying its signature. -The signed artifacts will be found in the same location as the original artifacts. 
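The sign_args.py and sign_artifacts.py changes above replace the injected Signer instance with a platform string: the CLI now restricts --platform to linux or windows (and accepts .asc in addition to .sig), and SignArtifacts resolves the concrete signer itself via Signers.create(platform), the factory added later in this patch. A minimal sketch of the resulting entrypoint, not part of this patch; the manifest path and component name below are hypothetical, and constructing a signer bootstraps the opensearch-signer-client, so this only runs in the signing environment.

    # Hedged sketch of the platform-aware signing entrypoint introduced by this patch.
    from pathlib import Path

    from sign_workflow.sign_artifacts import SignArtifacts

    sign = SignArtifacts.from_path(
        path=Path("builds/opensearch/manifest.yml"),  # hypothetical build manifest location
        components=["opensearch"],                    # hypothetical component filter
        artifact_type="maven",
        signature_type=".sig",
        platform="windows",  # resolved to SignerWindows via Signers.create(platform)
    )
    sign.sign()  # the concrete SignArtifacts subclass signs everything through its platform signer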
-""" - -class Signer: +class Signer(ABC): git_repo: GitRepository - ACCEPTED_FILE_TYPES = [".zip", ".jar", ".war", ".pom", ".module", ".tar.gz", ".whl", ".crate", ".rpm"] - def __init__(self) -> None: self.git_repo = GitRepository(self.get_repo_url(), "HEAD", working_subdirectory="src") self.git_repo.execute("./bootstrap") @@ -42,15 +36,13 @@ def sign_artifacts(self, artifacts: List[str], basepath: Path, signature_type: s continue self.generate_signature_and_verify(artifact, basepath, signature_type) + @abstractmethod def generate_signature_and_verify(self, artifact: str, basepath: Path, signature_type: str) -> None: - location = os.path.join(basepath, artifact) - self.sign(location, signature_type) - self.verify(location + signature_type) + pass + @abstractmethod def is_valid_file_type(self, file_name: str) -> bool: - return any( - file_name.endswith(x) for x in Signer.ACCEPTED_FILE_TYPES - ) + pass def get_repo_url(self) -> str: if "GITHUB_TOKEN" in os.environ: @@ -62,20 +54,6 @@ def __remove_existing_signature__(self, signature_file: str) -> None: logging.warning(f"Removing existing signature file {signature_file}") os.remove(signature_file) - def sign(self, filename: str, signature_type: str) -> None: - signature_file = filename + signature_type - self.__remove_existing_signature__(signature_file) - signing_cmd = [ - "./opensearch-signer-client", - "-i", - filename, - "-o", - signature_file, - "-p", - "pgp", - ] - self.git_repo.execute(" ".join(signing_cmd)) - - def verify(self, filename: str) -> None: - verify_cmd = ["gpg", "--verify-files", filename] - self.git_repo.execute(" ".join(verify_cmd)) + @abstractmethod + def sign(self, artifact: str, basepath: Path, signature_type: str) -> None: + pass diff --git a/src/sign_workflow/signer_pgp.py b/src/sign_workflow/signer_pgp.py new file mode 100644 index 0000000000..18675ddeac --- /dev/null +++ b/src/sign_workflow/signer_pgp.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +import os +from pathlib import Path + +from sign_workflow.signer import Signer + +""" +This class is responsible for signing an artifact using the OpenSearch-signer-client and verifying its signature. +The signed artifacts will be found in the same location as the original artifacts. 
+""" + + +class SignerPGP(Signer): + + ACCEPTED_FILE_TYPES = [".zip", ".jar", ".war", ".pom", ".module", ".tar.gz", ".whl", ".crate", ".rpm"] + + def generate_signature_and_verify(self, artifact: str, basepath: Path, signature_type: str) -> None: + location = os.path.join(basepath, artifact) + self.sign(artifact, basepath, signature_type) + self.verify(location + signature_type) + + def is_valid_file_type(self, file_name: str) -> bool: + return any( + file_name.endswith(x) for x in SignerPGP.ACCEPTED_FILE_TYPES + ) + + def sign(self, artifact: str, basepath: Path, signature_type: str) -> None: + filename = os.path.join(basepath, artifact) + signature_file = filename + signature_type + self.__remove_existing_signature__(signature_file) + signing_cmd = [ + "./opensearch-signer-client", + "-i", + filename, + "-o", + signature_file, + "-p", + "pgp", + ] + self.git_repo.execute(" ".join(signing_cmd)) + + def verify(self, filename: str) -> None: + verify_cmd = ["gpg", "--verify-files", filename] + self.git_repo.execute(" ".join(verify_cmd)) diff --git a/src/sign_workflow/signer_windows.py b/src/sign_workflow/signer_windows.py new file mode 100644 index 0000000000..30ece8eb9b --- /dev/null +++ b/src/sign_workflow/signer_windows.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +import os +from pathlib import Path + +from sign_workflow.signer import Signer + +""" +This class is responsible for signing an artifact using the OpenSearch-signer-client and verifying its signature. +The signed artifacts will be found in the subfolder called signed under the origin location as the original artifacts. +""" + + +class SignerWindows(Signer): + + ACCEPTED_FILE_TYPES = [".msi", ".exe", ".dll", ".sys", ".ps1", ".psm1", ".psd1", ".cat", ".zip"] + + def generate_signature_and_verify(self, artifact: str, basepath: Path, signature_type: str) -> None: + self.sign(artifact, basepath, signature_type) + + def is_valid_file_type(self, file_name: str) -> bool: + return any( + file_name.endswith(x) for x in SignerWindows.ACCEPTED_FILE_TYPES + ) + + def sign(self, artifact: str, basepath: Path, signature_type: str) -> None: + filename = os.path.join(basepath, artifact) + signed_prefix = "signed_" + signature_file = os.path.join(basepath, signed_prefix + artifact) + self.__remove_existing_signature__(signature_file) + signing_cmd = [ + "./opensearch-signer-client", + "-i", + filename, + "-o", + signature_file, + "-p", + "windows", + ] + self.git_repo.execute(" ".join(signing_cmd)) + signed_folder = os.path.join(basepath, "signed") + if not os.path.exists(signed_folder): + os.mkdir(signed_folder) + signed_location = os.path.join(signed_folder, artifact) + os.rename(signature_file, signed_location) diff --git a/src/sign_workflow/signers.py b/src/sign_workflow/signers.py new file mode 100644 index 0000000000..7d27fb57a9 --- /dev/null +++ b/src/sign_workflow/signers.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python + +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+ + +from sign_workflow.signer import Signer +from sign_workflow.signer_pgp import SignerPGP +from sign_workflow.signer_windows import SignerWindows + + +class Signers: + TYPES = { + "windows": SignerWindows, + "linux": SignerPGP, + } + + @classmethod + def from_platform(cls, platform: str) -> Signer: + klass = cls.TYPES.get(platform, None) + if not klass: + raise ValueError(f"Unsupported type of platform for signing: {platform}") + return klass # type: ignore[return-value] + + @classmethod + def create(cls, platform: str) -> Signer: + klass = cls.from_platform(platform) + return klass() # type: ignore[no-any-return, operator] diff --git a/tests/jenkins/TestCCRPerfTest.groovy b/tests/jenkins/TestCCRPerfTest.groovy index 9711e3804b..16946aff34 100644 --- a/tests/jenkins/TestCCRPerfTest.groovy +++ b/tests/jenkins/TestCCRPerfTest.groovy @@ -53,7 +53,7 @@ class TestCCRPerfTest extends BuildPipelineTest { assertThat(s3DownloadCommands.size(), equalTo(1)) assertThat(s3DownloadCommands, hasItem( - "{file=config.yml, bucket=test_bucket, path=test_config/config-ccr.yml, force=true}".toString() + "{file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config-ccr.yml, force=true}".toString() )) } diff --git a/tests/jenkins/TestCopyContainer.groovy b/tests/jenkins/TestCopyContainer.groovy index 345fa5a7ec..4ffe56af7c 100644 --- a/tests/jenkins/TestCopyContainer.groovy +++ b/tests/jenkins/TestCopyContainer.groovy @@ -13,7 +13,7 @@ class TestCopyContainer extends BuildPipelineTest { binding.setVariable('ARTIFACT_PROMOTION_ROLE_NAME', 'sample-agent-AssumeRole') binding.setVariable('AWS_ACCOUNT_ARTIFACT', '1234567890') binding.setVariable('DATA_PREPPER_STAGING_CONTAINER_REPOSITORY', 'sample_dataprepper_ecr_url') - helper.registerAllowedMethod('withAWS', [Map, Closure], null) + helper.registerAllowedMethod('withAWS', [Map, Closure], null) super.setUp() } diff --git a/tests/jenkins/TestOpenSearchBwcTest.groovy b/tests/jenkins/TestOpenSearchBwcTest.groovy index b574e62d82..8875885804 100644 --- a/tests/jenkins/TestOpenSearchBwcTest.groovy +++ b/tests/jenkins/TestOpenSearchBwcTest.groovy @@ -29,7 +29,7 @@ class TestOpenSearchBwcTest extends BuildPipelineTest { 'BWC Tests Successful', '', testManifest, - 'INTEG_TEST_WEBHOOK')) + 'jenkins-integ-test-webhook')) super.setUp() // Variables diff --git a/tests/jenkins/TestOpenSearchDashboardsBwcTest.groovy b/tests/jenkins/TestOpenSearchDashboardsBwcTest.groovy index cce6013c9e..2a34130eb2 100644 --- a/tests/jenkins/TestOpenSearchDashboardsBwcTest.groovy +++ b/tests/jenkins/TestOpenSearchDashboardsBwcTest.groovy @@ -29,7 +29,7 @@ class TestOpenSearchDashboardsBwcTest extends BuildPipelineTest { 'BWC Tests Successful', '', testManifest, - 'INTEG_TEST_WEBHOOK')) + 'jenkins-integ-test-webhook')) super.setUp() // Variables diff --git a/tests/jenkins/TestOpenSearchDashboardsIntegTest.groovy b/tests/jenkins/TestOpenSearchDashboardsIntegTest.groovy index ca24389c1b..cd8caa31e0 100644 --- a/tests/jenkins/TestOpenSearchDashboardsIntegTest.groovy +++ b/tests/jenkins/TestOpenSearchDashboardsIntegTest.groovy @@ -29,7 +29,7 @@ class TestOpenSearchDashboardsIntegTest extends BuildPipelineTest { 'Integration Tests Successful', '', testManifest, - 'INTEG_TEST_WEBHOOK')) + 'jenkins-integ-test-webhook')) super.setUp() // Variables diff --git a/tests/jenkins/TestOpenSearchIntegTest.groovy b/tests/jenkins/TestOpenSearchIntegTest.groovy index 3b84bb24fb..6ba5fa5221 100644 --- a/tests/jenkins/TestOpenSearchIntegTest.groovy +++ b/tests/jenkins/TestOpenSearchIntegTest.groovy @@ 
-29,7 +29,7 @@ class TestOpenSearchIntegTest extends BuildPipelineTest { 'Integration Tests Successful', '', testManifest, - 'INTEG_TEST_WEBHOOK')) + 'jenkins-integ-test-webhook')) super.setUp() // Variables diff --git a/tests/jenkins/TestPromoteArtifacts.groovy b/tests/jenkins/TestPromoteArtifacts.groovy index c2a430d8a5..591b50a463 100644 --- a/tests/jenkins/TestPromoteArtifacts.groovy +++ b/tests/jenkins/TestPromoteArtifacts.groovy @@ -29,28 +29,27 @@ class TestPromoteArtifacts extends BuildPipelineTest { binding.setVariable('PUBLIC_ARTIFACT_URL', 'https://ci.opensearch.org/dbc') binding.setVariable('DISTRIBUTION_JOB_NAME', 'vars-build') - binding.setVariable('ARTIFACT_BUCKET_NAME', 'artifact-bucket') - binding.setVariable('AWS_ACCOUNT_PUBLIC', 'account') binding.setVariable('STAGE_NAME', 'stage') binding.setVariable('BUILD_URL', 'http://jenkins.us-east-1.elb.amazonaws.com/job/vars/42') binding.setVariable('DISTRIBUTION_BUILD_NUMBER', '33') binding.setVariable('DISTRIBUTION_PLATFORM', 'linux') binding.setVariable('DISTRIBUTION_ARCHITECTURE', 'x64') - binding.setVariable('ARTIFACT_DOWNLOAD_ROLE_NAME', 'downloadRoleName') - binding.setVariable('AWS_ACCOUNT_PUBLIC', 'publicAccount') - binding.setVariable('ARTIFACT_PROMOTION_ROLE_NAME', 'artifactPromotionRole') - binding.setVariable('AWS_ACCOUNT_ARTIFACT', 'artifactsAccount') - binding.setVariable('ARTIFACT_PRODUCTION_BUCKET_NAME', 'prod-bucket-name') binding.setVariable('WORKSPACE', 'tests/jenkins') binding.setVariable('GITHUB_BOT_TOKEN_NAME', 'github_bot_token_name') - binding.setVariable('SIGNER_CLIENT_ROLE', 'dummy_signer_client_role') - binding.setVariable('SIGNER_CLIENT_EXTERNAL_ID', 'signer_client_external_id') - binding.setVariable('SIGNER_CLIENT_UNSIGNED_BUCKET', 'signer_client_unsigned_bucket') - binding.setVariable('SIGNER_CLIENT_SIGNED_BUCKET', 'signer_client_signed_bucket') + def configs = ["role": "dummy_role", + "external_id": "dummy_ID", + "unsigned_bucket": "dummy_unsigned_bucket", + "signed_bucket": "dummy_signed_bucket"] + binding.setVariable('configs', configs) + helper.registerAllowedMethod("readJSON", [Map.class], {c -> configs}) helper.registerAllowedMethod("git", [Map]) helper.registerAllowedMethod("s3Download", [Map]) helper.registerAllowedMethod("s3Upload", [Map]) + helper.registerAllowedMethod("withCredentials", [Map, Closure], { args, closure -> + closure.delegate = delegate + return helper.callClosure(closure) + }) helper.registerAllowedMethod("withAWS", [Map, Closure], { args, closure -> closure.delegate = delegate return helper.callClosure(closure) diff --git a/tests/jenkins/TestPromoteContainer.groovy b/tests/jenkins/TestPromoteContainer.groovy new file mode 100644 index 0000000000..b5dcb3f260 --- /dev/null +++ b/tests/jenkins/TestPromoteContainer.groovy @@ -0,0 +1,116 @@ +import jenkins.tests.BuildPipelineTest +import org.junit.Before +import org.junit.Test + +class TestPromoteContainer extends BuildPipelineTest { + + String PROMOTE_PRODUCT = 'opensearch:2.0.1.2901, opensearch-dashboards:2.0.1-2345, data-prepper:2.0.1.123' + String RELEASE_VERSION = '2.0.1' + + @Before + void setUp() { + binding.setVariable('SOURCE_IMAGES', PROMOTE_PRODUCT) + binding.setVariable('RELEASE_VERSION', RELEASE_VERSION) + binding.setVariable('DOCKER_USERNAME', 'dummy_docker_username') + binding.setVariable('DOCKER_PASSWORD', 'dummy_docker_password') + binding.setVariable('ARTIFACT_PROMOTION_ROLE_NAME', 'dummy-agent-AssumeRole') + binding.setVariable('AWS_ACCOUNT_ARTIFACT', '1234567890') + 
binding.setVariable('DATA_PREPPER_STAGING_CONTAINER_REPOSITORY', 'dummy_dataprepper_ecr_url') + + + helper.registerAllowedMethod('withAWS', [Map, Closure], null) + super.setUp() + + } + + @Test + public void testPromoteContainerToDocker() { + String dockerPromote = true + String ecrPromote = false + String latestBoolean = false + String majorVersionBoolean = false + binding.setVariable('DOCKER_HUB_PROMOTE', dockerPromote) + binding.setVariable('ECR_PROMOTE', ecrPromote) + binding.setVariable('TAG_LATEST', latestBoolean) + binding.setVariable('TAG_MAJOR_VERSION', majorVersionBoolean) + + super.testPipeline("jenkins/promotion/promote-docker-ecr.jenkinsfile", + "tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDocker.jenkinsfile") + } + + @Test + public void testPromoteContainerToDockerLatest() { + String dockerPromote = true + String ecrPromote = false + String latestBoolean = true + String majorVersionBoolean = false + binding.setVariable('DOCKER_HUB_PROMOTE', dockerPromote) + binding.setVariable('ECR_PROMOTE', ecrPromote) + binding.setVariable('TAG_LATEST', latestBoolean) + binding.setVariable('TAG_MAJOR_VERSION', majorVersionBoolean) + + super.testPipeline("jenkins/promotion/promote-docker-ecr.jenkinsfile", + "tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatest.jenkinsfile") + } + + @Test + public void testPromoteContainerToDockerMajor() { + String dockerPromote = true + String ecrPromote = false + String latestBoolean = false + String majorVersionBoolean = true + binding.setVariable('DOCKER_HUB_PROMOTE', dockerPromote) + binding.setVariable('ECR_PROMOTE', ecrPromote) + binding.setVariable('TAG_LATEST', latestBoolean) + binding.setVariable('TAG_MAJOR_VERSION', majorVersionBoolean) + + super.testPipeline("jenkins/promotion/promote-docker-ecr.jenkinsfile", + "tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerMajor.jenkinsfile") + } + + @Test + public void testPromoteContainerToDockerLatestMajor() { + String dockerPromote = true + String ecrPromote = false + String latestBoolean = true + String majorVersionBoolean = true + binding.setVariable('DOCKER_HUB_PROMOTE', dockerPromote) + binding.setVariable('ECR_PROMOTE', ecrPromote) + binding.setVariable('TAG_LATEST', latestBoolean) + binding.setVariable('TAG_MAJOR_VERSION', majorVersionBoolean) + + super.testPipeline("jenkins/promotion/promote-docker-ecr.jenkinsfile", + "tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatestMajor.jenkinsfile") + } + + @Test + public void testPromoteContainerToECRLatestMajor() { + String dockerPromote = false + String ecrPromote = true + String latestBoolean = true + String majorVersionBoolean = true + binding.setVariable('DOCKER_HUB_PROMOTE', dockerPromote) + binding.setVariable('ECR_PROMOTE', ecrPromote) + binding.setVariable('TAG_LATEST', latestBoolean) + binding.setVariable('TAG_MAJOR_VERSION', majorVersionBoolean) + + super.testPipeline("jenkins/promotion/promote-docker-ecr.jenkinsfile", + "tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToECRLatestMajor.jenkinsfile") + } + + @Test + public void testPromoteContainerToDockerECRLatestMajor() { + String dockerPromote = true + String ecrPromote = true + String latestBoolean = true + String majorVersionBoolean = true + 
binding.setVariable('DOCKER_HUB_PROMOTE', dockerPromote) + binding.setVariable('ECR_PROMOTE', ecrPromote) + binding.setVariable('TAG_LATEST', latestBoolean) + binding.setVariable('TAG_MAJOR_VERSION', majorVersionBoolean) + + super.testPipeline("jenkins/promotion/promote-docker-ecr.jenkinsfile", + "tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerECRLatestMajor.jenkinsfile") + } + +} diff --git a/tests/jenkins/TestPromoteYumRepos.groovy b/tests/jenkins/TestPromoteYumRepos.groovy index 17307e7744..dc6ff59055 100644 --- a/tests/jenkins/TestPromoteYumRepos.groovy +++ b/tests/jenkins/TestPromoteYumRepos.groovy @@ -20,15 +20,18 @@ class TestPromoteYumRepos extends BuildPipelineTest { super.setUp() binding.setVariable('PUBLIC_ARTIFACT_URL', 'https://ci.opensearch.org/dbc') - binding.setVariable('ARTIFACT_PROMOTION_ROLE_NAME', 'artifactPromotionRole') - binding.setVariable('AWS_ACCOUNT_ARTIFACT', 'artifactsAccount') - binding.setVariable('ARTIFACT_PRODUCTION_BUCKET_NAME', 'prod-bucket-name') binding.setVariable('GITHUB_BOT_TOKEN_NAME', 'github_bot_token_name') - binding.setVariable('SIGNER_CLIENT_ROLE', 'dummy_signer_client_role') - binding.setVariable('SIGNER_CLIENT_EXTERNAL_ID', 'signer_client_external_id') - binding.setVariable('SIGNER_CLIENT_UNSIGNED_BUCKET', 'signer_client_unsigned_bucket') - binding.setVariable('SIGNER_CLIENT_SIGNED_BUCKET', 'signer_client_signed_bucket') + def configs = ["role": "dummy_role", + "external_id": "dummy_ID", + "unsigned_bucket": "dummy_unsigned_bucket", + "signed_bucket": "dummy_signed_bucket"] + binding.setVariable('configs', configs) + helper.registerAllowedMethod("readJSON", [Map.class], {c -> configs}) helper.registerAllowedMethod("git", [Map]) + helper.registerAllowedMethod("withCredentials", [Map, Closure], { args, closure -> + closure.delegate = delegate + return helper.callClosure(closure) + }) helper.registerAllowedMethod("withAWS", [Map, Closure], { args, closure -> closure.delegate = delegate return helper.callClosure(closure) diff --git a/tests/jenkins/TestPublishNotification.groovy b/tests/jenkins/TestPublishNotification.groovy index 7b30d2db5f..1da7357b04 100644 --- a/tests/jenkins/TestPublishNotification.groovy +++ b/tests/jenkins/TestPublishNotification.groovy @@ -17,7 +17,7 @@ class TestPublishNotification extends BuildPipelineTest { void setUp() { this.registerLibTester(new PublishNotificationLibTester( - ':white_check_mark:', 'Successful Build' , 'extra', '1.2.0/opensearch-1.2.0.yml', 'BUILD_NOTICE_WEBHOOK')) + ':white_check_mark:', 'Successful Build' , 'extra', '1.2.0/opensearch-1.2.0.yml', 'jenkins-build-notice-webhook')) super.setUp() } diff --git a/tests/jenkins/TestRunNonSecurityPerfTestScript.groovy b/tests/jenkins/TestRunNonSecurityPerfTestScript.groovy index 36bf2a0495..0457aafdfc 100644 --- a/tests/jenkins/TestRunNonSecurityPerfTestScript.groovy +++ b/tests/jenkins/TestRunNonSecurityPerfTestScript.groovy @@ -56,7 +56,7 @@ class TestRunNonSecurityPerfTestScript extends BuildPipelineTest { assertThat(s3DownloadCommands.size(), equalTo(1)) assertThat(s3DownloadCommands, hasItem( - "{file=config.yml, bucket=test_bucket, path=test_config/config.yml, force=true}".toString() + "{file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config.yml, force=true}".toString() )) } diff --git a/tests/jenkins/TestRunPerfTestScript.groovy b/tests/jenkins/TestRunPerfTestScript.groovy index 6bd1445a8b..3aaa792fb8 100644 --- a/tests/jenkins/TestRunPerfTestScript.groovy +++ 
b/tests/jenkins/TestRunPerfTestScript.groovy @@ -56,7 +56,7 @@ class TestRunPerfTestScript extends BuildPipelineTest { assertThat(s3DownloadCommands.size(), equalTo(2)) assertThat(s3DownloadCommands, hasItem( - "{file=config.yml, bucket=test_bucket, path=test_config/config.yml, force=true}".toString() + "{file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config.yml, force=true}".toString() )) } diff --git a/tests/jenkins/jenkinsjob-regression-files/cross-cluster-replication/perf-test.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/cross-cluster-replication/perf-test.jenkinsfile.txt index 7a5d5d356f..6cb8863eb2 100644 --- a/tests/jenkins/jenkinsjob-regression-files/cross-cluster-replication/perf-test.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/cross-cluster-replication/perf-test.jenkinsfile.txt @@ -42,8 +42,11 @@ pipenv install "aws-cdk.core~=1.143.0" "aws_cdk.aws_ec2~=1.143.0" "aws_cdk.aws_iam~=1.143.0" pipenv install "boto3~=1.18" "setuptools~=57.4" "retry~=0.9" ) - runPerfTestScript.withAWS({role=opensearch-test, roleAccount=dummy_account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - runPerfTestScript.s3Download({file=config.yml, bucket=test_bucket, path=test_config/config-ccr.yml, force=true}) + runPerfTestScript.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + runPerfTestScript.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + runPerfTestScript.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + runPerfTestScript.withAWS({role=opensearch-test, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + runPerfTestScript.s3Download({file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config-ccr.yml, force=true}) runPerfTestScript.usernamePassword({credentialsId=bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) runPerfTestScript.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) runPerfTestScript.sh(./test.sh perf-test --stack test-single-security-1236-x64-perf-test --bundle-manifest tests/jenkins/data/opensearch-1.3.0-bundle.yml --config config.yml --component cross-cluster-replication) @@ -88,8 +91,8 @@ CCR Performance tests for 1236 completed}) perf-test.findFiles({excludes=, glob=messages/*}) perf-test.dir(messages, groovy.lang.Closure) perf-test.deleteDir() - perf-test.publishNotification({icon=:white_check_mark:, message=CCR Performance Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + perf-test.publishNotification({icon=:white_check_mark:, message=CCR Performance Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=perf-test diff --git a/tests/jenkins/jenkinsjob-regression-files/data-prepper/release-data-prepper-all-artifacts.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/data-prepper/release-data-prepper-all-artifacts.jenkinsfile.txt index cc78959a78..51b480c6ad 100644 --- a/tests/jenkins/jenkinsjob-regression-files/data-prepper/release-data-prepper-all-artifacts.jenkinsfile.txt +++ 
b/tests/jenkins/jenkinsjob-regression-files/data-prepper/release-data-prepper-all-artifacts.jenkinsfile.txt @@ -2,6 +2,11 @@ release-data-prepper-all-artifacts.legacySCM(groovy.lang.Closure) release-data-prepper-all-artifacts.library({identifier=jenkins@20211123, retriever=null}) release-data-prepper-all-artifacts.pipeline(groovy.lang.Closure) + release-data-prepper-all-artifacts.credentials(jenkins-data-prepper-artifact-staging-site) + release-data-prepper-all-artifacts.credentials(jenkins-data-prepper-staging-container-repository) + release-data-prepper-all-artifacts.credentials(jenkins-artifact-promotion-role) + release-data-prepper-all-artifacts.credentials(jenkins-aws-production-account) + release-data-prepper-all-artifacts.credentials(jenkins-artifact-production-bucket-name) release-data-prepper-all-artifacts.timeout({time=1, unit=HOURS}) release-data-prepper-all-artifacts.echo(Executing on agent [label:none]) release-data-prepper-all-artifacts.stage(Download Archives, groovy.lang.Closure) @@ -12,22 +17,26 @@ release-data-prepper-all-artifacts.stage(Sign Archives, groovy.lang.Closure) release-data-prepper-all-artifacts.script(groovy.lang.Closure) release-data-prepper-all-artifacts.signArtifacts({artifactPath=/tmp/workspace/archive, sigtype=.sig, platform=linux}) - signArtifacts.echo(PGP Signature Signing) + signArtifacts.echo(PGP or Windows Signature Signing) signArtifacts.fileExists(/tmp/workspace/sign.sh) signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/archive --sigtype=.sig --platform=linux - ) + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/archive --sigtype=.sig --platform=linux + ) release-data-prepper-all-artifacts.stage(Release Archives to Production Distribution Bucket, groovy.lang.Closure) release-data-prepper-all-artifacts.script(groovy.lang.Closure) release-data-prepper-all-artifacts.withAWS({role=production-role-name, roleAccount=aws-account-artifact, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) @@ -84,22 +93,26 @@ release-data-prepper-all-artifacts.stage(Sign Maven Artifacts, groovy.lang.Closure) release-data-prepper-all-artifacts.script(groovy.lang.Closure) release-data-prepper-all-artifacts.signArtifacts({artifactPath=/tmp/workspace/maven, type=maven, platform=linux}) - signArtifacts.echo(PGP Signature Signing) + signArtifacts.echo(PGP or Windows Signature Signing) signArtifacts.fileExists(/tmp/workspace/sign.sh) 
signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/maven --type=maven --platform=linux - ) + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/maven --type=maven --platform=linux + ) release-data-prepper-all-artifacts.stage(Upload Artifacts to Sonatype, groovy.lang.Closure) release-data-prepper-all-artifacts.script(groovy.lang.Closure) release-data-prepper-all-artifacts.usernamePassword({credentialsId=Sonatype, usernameVariable=SONATYPE_USERNAME, passwordVariable=SONATYPE_PASSWORD}) diff --git a/tests/jenkins/jenkinsjob-regression-files/maven-sign-release/maven-sign-release.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/maven-sign-release/maven-sign-release.jenkinsfile.txt index 60f91148c8..830516da81 100644 --- a/tests/jenkins/jenkinsjob-regression-files/maven-sign-release/maven-sign-release.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/maven-sign-release/maven-sign-release.jenkinsfile.txt @@ -2,31 +2,38 @@ maven-sign-release.legacySCM(groovy.lang.Closure) maven-sign-release.library({identifier=jenkins@20211123, retriever=null}) maven-sign-release.pipeline(groovy.lang.Closure) + maven-sign-release.credentials(jenkins-artifact-bucket-name) maven-sign-release.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:centos7-x64-arm64-jdkmulti-node10.24.1-cypress6.9.1-20211130, reuseNode:false, stages:[:], args:, alwaysPull:true, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) maven-sign-release.stage(sign, groovy.lang.Closure) maven-sign-release.script(groovy.lang.Closure) maven-sign-release.echo(Downloading from S3.) 
maven-sign-release.downloadFromS3({destPath=/tmp/workspace/artifacts, bucket=job-s3-bucket-name, path=distribution-build-opensearch/1.0.0/123/linux/x64/builds/, force=true}) - downloadFromS3.withAWS({role=Dummy_Download_Role, roleAccount=dummy_account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - downloadFromS3.s3Download({file=/tmp/workspace/artifacts, bucket=job-s3-bucket-name, path=distribution-build-opensearch/1.0.0/123/linux/x64/builds/, force=true}) + downloadFromS3.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + downloadFromS3.withCredentials([AWS_ACCOUNT_PUBLIC], groovy.lang.Closure) + downloadFromS3.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + downloadFromS3.s3Download({file=/tmp/workspace/artifacts, bucket=job-s3-bucket-name, path=distribution-build-opensearch/1.0.0/123/linux/x64/builds/, force=true}) maven-sign-release.echo(Signing Maven artifacts.) maven-sign-release.signArtifacts({artifactPath=/tmp/workspace/artifacts/distribution-build-opensearch/1.0.0/123/linux/x64/builds/opensearch/manifest.yml, type=maven, platform=linux}) - signArtifacts.echo(PGP Signature Signing) + signArtifacts.echo(PGP or Windows Signature Signing) signArtifacts.fileExists(/tmp/workspace/sign.sh) signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/artifacts/distribution-build-opensearch/1.0.0/123/linux/x64/builds/opensearch/manifest.yml --type=maven --platform=linux - ) + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/artifacts/distribution-build-opensearch/1.0.0/123/linux/x64/builds/opensearch/manifest.yml --type=maven --platform=linux + ) maven-sign-release.stage(stage maven artifacts, groovy.lang.Closure) maven-sign-release.script(groovy.lang.Closure) maven-sign-release.usernamePassword({credentialsId=Sonatype, usernameVariable=SONATYPE_USERNAME, passwordVariable=SONATYPE_PASSWORD}) diff --git a/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/bwc-test.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/bwc-test.jenkinsfile.txt index ade86a86b2..5fb15bcf82 100644 --- a/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/bwc-test.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/bwc-test.jenkinsfile.txt @@ -67,8 +67,8 @@ bwc-test.findFiles({excludes=, glob=messages/*}) bwc-test.dir(messages, 
groovy.lang.Closure) bwc-test.deleteDir() - bwc-test.publishNotification({icon=:white_check_mark:, message=BWC Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK, manifest=tests/jenkins/data/opensearch-dashboards-1.2.0-test.yml}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + bwc-test.publishNotification({icon=:white_check_mark:, message=BWC Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook, manifest=tests/jenkins/data/opensearch-dashboards-1.2.0-test.yml}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=dummy_job diff --git a/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/integ-test.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/integ-test.jenkinsfile.txt index 58e851f954..7d3a66ecf2 100644 --- a/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/integ-test.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/opensearch-dashboards/integ-test.jenkinsfile.txt @@ -69,8 +69,8 @@ integ-test.findFiles({excludes=, glob=messages/*}) integ-test.dir(messages, groovy.lang.Closure) integ-test.deleteDir() - integ-test.publishNotification({icon=:white_check_mark:, message=Integration Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK, manifest=tests/jenkins/data/opensearch-dashboards-1.2.0-test.yml}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + integ-test.publishNotification({icon=:white_check_mark:, message=Integration Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook, manifest=tests/jenkins/data/opensearch-dashboards-1.2.0-test.yml}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=dummy_job diff --git a/tests/jenkins/jenkinsjob-regression-files/opensearch/bwc-test.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/opensearch/bwc-test.jenkinsfile.txt index b60d26a3fe..02ced5faaa 100644 --- a/tests/jenkins/jenkinsjob-regression-files/opensearch/bwc-test.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/opensearch/bwc-test.jenkinsfile.txt @@ -67,8 +67,8 @@ bwc-test.findFiles({excludes=, glob=messages/*}) bwc-test.dir(messages, groovy.lang.Closure) bwc-test.deleteDir() - bwc-test.publishNotification({icon=:white_check_mark:, message=BWC Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK, manifest=tests/jenkins/data/opensearch-1.3.0-test.yml}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + bwc-test.publishNotification({icon=:white_check_mark:, message=BWC Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook, manifest=tests/jenkins/data/opensearch-1.3.0-test.yml}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=dummy_job diff --git 
a/tests/jenkins/jenkinsjob-regression-files/opensearch/integ-test.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/opensearch/integ-test.jenkinsfile.txt index 6a60804c17..9256a2f753 100644 --- a/tests/jenkins/jenkinsjob-regression-files/opensearch/integ-test.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/opensearch/integ-test.jenkinsfile.txt @@ -79,8 +79,8 @@ integ-test.findFiles({excludes=, glob=messages/*}) integ-test.dir(messages, groovy.lang.Closure) integ-test.deleteDir() - integ-test.publishNotification({icon=:white_check_mark:, message=Integration Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK, manifest=tests/jenkins/data/opensearch-1.3.0-test.yml}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + integ-test.publishNotification({icon=:white_check_mark:, message=Integration Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook, manifest=tests/jenkins/data/opensearch-1.3.0-test.yml}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=dummy_job diff --git a/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test-with-security.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test-with-security.jenkinsfile.txt index 83754a284b..45cc7a3b11 100644 --- a/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test-with-security.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test-with-security.jenkinsfile.txt @@ -42,8 +42,11 @@ pipenv install "aws-cdk.core~=1.143.0" "aws_cdk.aws_ec2~=1.143.0" "aws_cdk.aws_iam~=1.143.0" pipenv install "boto3~=1.18" "setuptools~=57.4" "retry~=0.9" ) - runPerfTestScript.withAWS({role=opensearch-test, roleAccount=dummy_account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - runPerfTestScript.s3Download({file=config.yml, bucket=test_bucket, path=test_config/config.yml, force=true}) + runPerfTestScript.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + runPerfTestScript.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + runPerfTestScript.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + runPerfTestScript.withAWS({role=opensearch-test, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + runPerfTestScript.s3Download({file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config.yml, force=true}) runPerfTestScript.usernamePassword({credentialsId=bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) runPerfTestScript.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) runPerfTestScript.sh(./test.sh perf-test --stack test-single-security-1236-x64-perf-test --bundle-manifest tests/jenkins/data/opensearch-1.3.0-bundle.yml --config config.yml --workload nyc_taxis --test-iters 1 --warmup-iters 1 ) @@ -102,8 +105,11 @@ Performance tests with security for 1236 completed}) pipenv install "aws-cdk.core~=1.143.0" "aws_cdk.aws_ec2~=1.143.0" "aws_cdk.aws_iam~=1.143.0" pipenv install "boto3~=1.18" "setuptools~=57.4" "retry~=0.9" ) - runPerfTestScript.withAWS({role=opensearch-test, roleAccount=dummy_account, duration=900, roleSessionName=jenkins-session}, 
groovy.lang.Closure) - runPerfTestScript.s3Download({file=config.yml, bucket=test_bucket, path=test_config/config.yml, force=true}) + runPerfTestScript.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + runPerfTestScript.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + runPerfTestScript.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + runPerfTestScript.withAWS({role=opensearch-test, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + runPerfTestScript.s3Download({file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config.yml, force=true}) runPerfTestScript.usernamePassword({credentialsId=bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) runPerfTestScript.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) runPerfTestScript.sh(./test.sh perf-test --stack test-single-1236-x64-perf-test --bundle-manifest tests/jenkins/data/opensearch-1.3.0-bundle.yml --config config.yml --without-security --workload nyc_taxis --test-iters 1 --warmup-iters 1 ) @@ -148,8 +154,8 @@ Performance tests without security for 1236 completed}) perf-test.findFiles({excludes=, glob=messages/*}) perf-test.dir(messages, groovy.lang.Closure) perf-test.deleteDir() - perf-test.publishNotification({icon=:white_check_mark:, message=Performance Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + perf-test.publishNotification({icon=:white_check_mark:, message=Performance Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=perf-test diff --git a/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test.jenkinsfile.txt index 514d265b0b..9700aeaaa8 100644 --- a/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/opensearch/perf-test.jenkinsfile.txt @@ -43,8 +43,11 @@ pipenv install "aws-cdk.core~=1.143.0" "aws_cdk.aws_ec2~=1.143.0" "aws_cdk.aws_iam~=1.143.0" pipenv install "boto3~=1.18" "setuptools~=57.4" "retry~=0.9" ) - runPerfTestScript.withAWS({role=opensearch-test, roleAccount=dummy_account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - runPerfTestScript.s3Download({file=config.yml, bucket=test_bucket, path=test_config/config.yml, force=true}) + runPerfTestScript.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + runPerfTestScript.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + runPerfTestScript.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + runPerfTestScript.withAWS({role=opensearch-test, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + runPerfTestScript.s3Download({file=config.yml, bucket=ARTIFACT_BUCKET_NAME, path=test_config/config.yml, force=true}) runPerfTestScript.usernamePassword({credentialsId=bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) 
runPerfTestScript.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) runPerfTestScript.sh(./test.sh perf-test --stack test-single-1236-x64-perf-test --bundle-manifest tests/jenkins/data/opensearch-1.3.0-non-security-bundle.yml --config config.yml --without-security --workload nyc_taxis --test-iters 1 --warmup-iters 1 ) @@ -89,8 +92,8 @@ Performance tests without security for 1236 completed}) perf-test.findFiles({excludes=, glob=messages/*}) perf-test.dir(messages, groovy.lang.Closure) perf-test.deleteDir() - perf-test.publishNotification({icon=:white_check_mark:, message=Performance Tests Successful, extra=, credentialsId=INTEG_TEST_WEBHOOK}) - publishNotification.string({credentialsId=INTEG_TEST_WEBHOOK, variable=WEBHOOK_URL}) + perf-test.publishNotification({icon=:white_check_mark:, message=Performance Tests Successful, extra=, credentialsId=jenkins-integ-test-webhook}) + publishNotification.string({credentialsId=jenkins-integ-test-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=perf-test diff --git a/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDocker.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDocker.jenkinsfile.txt new file mode 100644 index 0000000000..82a4172d01 --- /dev/null +++ b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDocker.jenkinsfile.txt @@ -0,0 +1,32 @@ + promote-docker-ecr.run() + promote-docker-ecr.legacySCM(groovy.lang.Closure) + promote-docker-ecr.library({identifier=jenkins@20211123, retriever=null}) + promote-docker-ecr.pipeline(groovy.lang.Closure) + promote-docker-ecr.timeout({time=1, unit=HOURS}) + promote-docker-ecr.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2, reuseNode:false, stages:[:], args:-u root -v /var/run/docker.sock:/var/run/docker.sock, alwaysPull:false, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) + promote-docker-ecr.stage(Parameters Check, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.stage(image-promote-to-prod, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.promoteContainer({imageRepository=opensearch:2.0.1.2901, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=false, majorVersionTag=false}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=opensearch-dashboards:2.0.1-2345, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=false, majorVersionTag=false}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + 
promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=data-prepper:2.0.1.123, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=false, majorVersionTag=false}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.postCleanup() + postCleanup.cleanWs({disableDeferredWipeout=true, deleteDirs=true}) + promote-docker-ecr.sh(docker logout) diff --git a/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerECRLatestMajor.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerECRLatestMajor.jenkinsfile.txt new file mode 100644 index 0000000000..71fd2d49fe --- /dev/null +++ b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerECRLatestMajor.jenkinsfile.txt @@ -0,0 +1,107 @@ + promote-docker-ecr.run() + promote-docker-ecr.legacySCM(groovy.lang.Closure) + promote-docker-ecr.library({identifier=jenkins@20211123, retriever=null}) + promote-docker-ecr.pipeline(groovy.lang.Closure) + promote-docker-ecr.timeout({time=1, unit=HOURS}) + promote-docker-ecr.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2, reuseNode:false, stages:[:], args:-u root -v /var/run/docker.sock:/var/run/docker.sock, alwaysPull:false, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) + promote-docker-ecr.stage(Parameters Check, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.stage(image-promote-to-prod, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.promoteContainer({imageRepository=opensearch:2.0.1.2901, version=2.0.1, dockerPromote=true, ecrPromote=true, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + 
promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=opensearch-dashboards:2.0.1-2345, version=2.0.1, dockerPromote=true, ecrPromote=true, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + 
promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=data-prepper:2.0.1.123, version=2.0.1, dockerPromote=true, ecrPromote=true, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.postCleanup() + postCleanup.cleanWs({disableDeferredWipeout=true, deleteDirs=true}) + promote-docker-ecr.sh(docker logout) diff --git 
a/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatest.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatest.jenkinsfile.txt new file mode 100644 index 0000000000..07f3414b4c --- /dev/null +++ b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatest.jenkinsfile.txt @@ -0,0 +1,47 @@ + promote-docker-ecr.run() + promote-docker-ecr.legacySCM(groovy.lang.Closure) + promote-docker-ecr.library({identifier=jenkins@20211123, retriever=null}) + promote-docker-ecr.pipeline(groovy.lang.Closure) + promote-docker-ecr.timeout({time=1, unit=HOURS}) + promote-docker-ecr.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2, reuseNode:false, stages:[:], args:-u root -v /var/run/docker.sock:/var/run/docker.sock, alwaysPull:false, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) + promote-docker-ecr.stage(Parameters Check, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.stage(image-promote-to-prod, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.promoteContainer({imageRepository=opensearch:2.0.1.2901, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=true, majorVersionTag=false}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=opensearch-dashboards:2.0.1-2345, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=true, majorVersionTag=false}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=data-prepper:2.0.1.123, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=true, majorVersionTag=false}) + 
promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.postCleanup() + postCleanup.cleanWs({disableDeferredWipeout=true, deleteDirs=true}) + promote-docker-ecr.sh(docker logout) diff --git a/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatestMajor.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatestMajor.jenkinsfile.txt new file mode 100644 index 0000000000..2fa321661e --- /dev/null +++ b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerLatestMajor.jenkinsfile.txt @@ -0,0 +1,62 @@ + promote-docker-ecr.run() + promote-docker-ecr.legacySCM(groovy.lang.Closure) + promote-docker-ecr.library({identifier=jenkins@20211123, retriever=null}) + promote-docker-ecr.pipeline(groovy.lang.Closure) + promote-docker-ecr.timeout({time=1, unit=HOURS}) + promote-docker-ecr.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2, reuseNode:false, stages:[:], args:-u root -v /var/run/docker.sock:/var/run/docker.sock, alwaysPull:false, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) + promote-docker-ecr.stage(Parameters Check, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.stage(image-promote-to-prod, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.promoteContainer({imageRepository=opensearch:2.0.1.2901, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + 
promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=opensearch-dashboards:2.0.1-2345, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=data-prepper:2.0.1.123, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.postCleanup() + postCleanup.cleanWs({disableDeferredWipeout=true, deleteDirs=true}) + promote-docker-ecr.sh(docker logout) diff --git a/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerMajor.jenkinsfile.txt 
b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerMajor.jenkinsfile.txt new file mode 100644 index 0000000000..495809ec10 --- /dev/null +++ b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToDockerMajor.jenkinsfile.txt @@ -0,0 +1,47 @@ + promote-docker-ecr.run() + promote-docker-ecr.legacySCM(groovy.lang.Closure) + promote-docker-ecr.library({identifier=jenkins@20211123, retriever=null}) + promote-docker-ecr.pipeline(groovy.lang.Closure) + promote-docker-ecr.timeout({time=1, unit=HOURS}) + promote-docker-ecr.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2, reuseNode:false, stages:[:], args:-u root -v /var/run/docker.sock:/var/run/docker.sock, alwaysPull:false, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) + promote-docker-ecr.stage(Parameters Check, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.stage(image-promote-to-prod, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.promoteContainer({imageRepository=opensearch:2.0.1.2901, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=false, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=opensearch-dashboards:2.0.1-2345, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=false, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=data-prepper:2.0.1.123, version=2.0.1, dockerPromote=true, ecrPromote=false, latestTag=false, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + 
promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.postCleanup() + postCleanup.cleanWs({disableDeferredWipeout=true, deleteDirs=true}) + promote-docker-ecr.sh(docker logout) diff --git a/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToECRLatestMajor.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToECRLatestMajor.jenkinsfile.txt new file mode 100644 index 0000000000..9a20ce0fad --- /dev/null +++ b/tests/jenkins/jenkinsjob-regression-files/promotion/promote-container/promote-container-testPromoteContainerToECRLatestMajor.jenkinsfile.txt @@ -0,0 +1,62 @@ + promote-docker-ecr.run() + promote-docker-ecr.legacySCM(groovy.lang.Closure) + promote-docker-ecr.library({identifier=jenkins@20211123, retriever=null}) + promote-docker-ecr.pipeline(groovy.lang.Closure) + promote-docker-ecr.timeout({time=1, unit=HOURS}) + promote-docker-ecr.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ubuntu2004-x64-docker-buildx0.6.3-qemu5.0-awscli1.22-jdk11-v2, reuseNode:false, stages:[:], args:-u root -v /var/run/docker.sock:/var/run/docker.sock, alwaysPull:false, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) + promote-docker-ecr.stage(Parameters Check, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.stage(image-promote-to-prod, groovy.lang.Closure) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.promoteContainer({imageRepository=opensearch:2.0.1.2901, version=2.0.1, dockerPromote=false, ecrPromote=true, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch:2.0.1.2901}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch:latest}) + 
promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=opensearch-dashboards:2.0.1-2345, version=2.0.1, dockerPromote=false, ecrPromote=true, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=opensearchstaging}) + promoteContainer.string({name=SOURCE_IMAGE, value=opensearch-dashboards:2.0.1-2345}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=opensearch-dashboards:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.promoteContainer({imageRepository=data-prepper:2.0.1.123, version=2.0.1, dockerPromote=false, ecrPromote=true, latestTag=true, majorVersionTag=true}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2.0.1}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:2}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promoteContainer.string({name=SOURCE_IMAGE_REGISTRY, value=dummy_dataprepper_ecr_url}) + promoteContainer.string({name=SOURCE_IMAGE, value=data-prepper:2.0.1.123}) + promoteContainer.string({name=DESTINATION_IMAGE_REGISTRY, value=public.ecr.aws/opensearchproject}) + promoteContainer.string({name=DESTINATION_IMAGE, value=data-prepper:latest}) + promoteContainer.build({job=docker-copy, parameters=[null, null, null, null]}) + promote-docker-ecr.script(groovy.lang.Closure) + promote-docker-ecr.postCleanup() + postCleanup.cleanWs({disableDeferredWipeout=true, deleteDirs=true}) + promote-docker-ecr.sh(docker logout) diff --git a/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag-dashboards.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag-dashboards.jenkinsfile.txt index 3c3235566b..c2dc7861a3 100644 --- a/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag-dashboards.jenkinsfile.txt +++ 
b/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag-dashboards.jenkinsfile.txt @@ -12,20 +12,20 @@ createReleaseTag.legacySCM(groovy.lang.Closure) createReleaseTag.library({identifier=jenkins@20211123, retriever=null}) createReleaseTag.readYaml({file=tests/jenkins/data/opensearch-dashboards-bundle-2.0.0-rc1.yml}) - BuildManifest.asBoolean() - BuildManifest.getNames() + BundleManifest.asBoolean() + BundleManifest.getNames() createReleaseTag.echo(Creating 2.0.0-rc1 release tag for 11 components in the manifest) createReleaseTag.usernamePassword({credentialsId=dummy_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) createReleaseTag.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - BuildManifest.getCommitId(ganttChartDashboards) - BuildManifest.getRepo(ganttChartDashboards) + BundleManifest.getCommitId(ganttChartDashboards) + BundleManifest.getRepo(ganttChartDashboards) createReleaseTag.echo(Tagging ganttChartDashboards at 69c54344eebb48d56deca823732954c27a10211c ...) createReleaseTag.dir(ganttChartDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=69c54344eebb48d56deca823732954c27a10211c}], userRemoteConfigs=[{url=https://github.com/opensearch-project/dashboards-visualizations.git}]}) createReleaseTag.sh({script=git ls-remote --tags https://github.com/opensearch-project/dashboards-visualizations.git 2.0.0.0-rc1 | awk 'NR==1{print $1}', returnStdout=true}) createReleaseTag.echo(Tag 2.0.0.0-rc1 has been created with identical commit ID. Skipping creating new tag for ganttChartDashboards.) - BuildManifest.getCommitId(indexManagementDashboards) - BuildManifest.getRepo(indexManagementDashboards) + BundleManifest.getCommitId(indexManagementDashboards) + BundleManifest.getRepo(indexManagementDashboards) createReleaseTag.echo(Tagging indexManagementDashboards at a39d9d307e9fbbaf6f23aefe794aa23022a68fa0 ...) createReleaseTag.dir(indexManagementDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=a39d9d307e9fbbaf6f23aefe794aa23022a68fa0}], userRemoteConfigs=[{url=https://github.com/opensearch-project/index-management-dashboards-plugin.git}]}) @@ -34,8 +34,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/index-management-dashboards-plugin.git 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/index-management-dashboards-plugin.git 2.0.0.0-rc1) - BuildManifest.getCommitId(anomalyDetectionDashboards) - BuildManifest.getRepo(anomalyDetectionDashboards) + BundleManifest.getCommitId(anomalyDetectionDashboards) + BundleManifest.getRepo(anomalyDetectionDashboards) createReleaseTag.echo(Tagging anomalyDetectionDashboards at 3324c01f66ec9919cc6d69420c37f5687312f9c4 ...) 
createReleaseTag.dir(anomalyDetectionDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=3324c01f66ec9919cc6d69420c37f5687312f9c4}], userRemoteConfigs=[{url=https://github.com/opensearch-project/anomaly-detection-dashboards-plugin}]}) @@ -44,8 +44,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/anomaly-detection-dashboards-plugin 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/anomaly-detection-dashboards-plugin 2.0.0.0-rc1) - BuildManifest.getCommitId(OpenSearch-Dashboards) - BuildManifest.getRepo(OpenSearch-Dashboards) + BundleManifest.getCommitId(OpenSearch-Dashboards) + BundleManifest.getRepo(OpenSearch-Dashboards) createReleaseTag.echo(Tagging OpenSearch-Dashboards at 23ee797c0542271a36746e0beea33833cd7396ba ...) createReleaseTag.dir(OpenSearch-Dashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=23ee797c0542271a36746e0beea33833cd7396ba}], userRemoteConfigs=[{url=https://github.com/opensearch-project/OpenSearch-Dashboards.git}]}) @@ -54,8 +54,8 @@ createReleaseTag.sh(git tag 2.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/OpenSearch-Dashboards.git 2.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/OpenSearch-Dashboards.git 2.0.0-rc1) - BuildManifest.getCommitId(notificationsDashboards) - BuildManifest.getRepo(notificationsDashboards) + BundleManifest.getCommitId(notificationsDashboards) + BundleManifest.getRepo(notificationsDashboards) createReleaseTag.echo(Tagging notificationsDashboards at eb3af31759668a94727950d081e8a3a161f22918 ...) createReleaseTag.dir(notificationsDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=eb3af31759668a94727950d081e8a3a161f22918}], userRemoteConfigs=[{url=https://github.com/opensearch-project/notifications.git}]}) @@ -64,8 +64,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/notifications.git 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/notifications.git 2.0.0.0-rc1) - BuildManifest.getCommitId(securityDashboards) - BuildManifest.getRepo(securityDashboards) + BundleManifest.getCommitId(securityDashboards) + BundleManifest.getRepo(securityDashboards) createReleaseTag.echo(Tagging securityDashboards at 7420eb2b5115ad29c40a49de9b5acb42dd26bdc7 ...) 
createReleaseTag.dir(securityDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=7420eb2b5115ad29c40a49de9b5acb42dd26bdc7}], userRemoteConfigs=[{url=https://github.com/opensearch-project/security-dashboards-plugin.git}]}) @@ -74,8 +74,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/security-dashboards-plugin.git 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/security-dashboards-plugin.git 2.0.0.0-rc1) - BuildManifest.getCommitId(functionalTestDashboards) - BuildManifest.getRepo(functionalTestDashboards) + BundleManifest.getCommitId(functionalTestDashboards) + BundleManifest.getRepo(functionalTestDashboards) createReleaseTag.echo(Tagging functionalTestDashboards at ec2da5c44634f40dc97c3fdac4e85ff7139091b3 ...) createReleaseTag.dir(functionalTestDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=ec2da5c44634f40dc97c3fdac4e85ff7139091b3}], userRemoteConfigs=[{url=https://github.com/opensearch-project/opensearch-dashboards-functional-test.git}]}) @@ -84,8 +84,8 @@ createReleaseTag.sh(git tag 2.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/opensearch-dashboards-functional-test.git 2.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/opensearch-dashboards-functional-test.git 2.0.0-rc1) - BuildManifest.getCommitId(alertingDashboards) - BuildManifest.getRepo(alertingDashboards) + BundleManifest.getCommitId(alertingDashboards) + BundleManifest.getRepo(alertingDashboards) createReleaseTag.echo(Tagging alertingDashboards at de11dd89cbdd431f5073901a74907125f1133baf ...) createReleaseTag.dir(alertingDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=de11dd89cbdd431f5073901a74907125f1133baf}], userRemoteConfigs=[{url=https://github.com/opensearch-project/alerting-dashboards-plugin.git}]}) @@ -94,8 +94,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/alerting-dashboards-plugin.git 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/alerting-dashboards-plugin.git 2.0.0.0-rc1) - BuildManifest.getCommitId(queryWorkbenchDashboards) - BuildManifest.getRepo(queryWorkbenchDashboards) + BundleManifest.getCommitId(queryWorkbenchDashboards) + BundleManifest.getRepo(queryWorkbenchDashboards) createReleaseTag.echo(Tagging queryWorkbenchDashboards at eb65d2d956872cb13c7cca340fff679468b86074 ...) 
createReleaseTag.dir(queryWorkbenchDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=eb65d2d956872cb13c7cca340fff679468b86074}], userRemoteConfigs=[{url=https://github.com/opensearch-project/sql.git}]}) @@ -104,8 +104,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/sql.git 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/sql.git 2.0.0.0-rc1) - BuildManifest.getCommitId(reportsDashboards) - BuildManifest.getRepo(reportsDashboards) + BundleManifest.getCommitId(reportsDashboards) + BundleManifest.getRepo(reportsDashboards) createReleaseTag.echo(Tagging reportsDashboards at 478ea41b5d962bcf40964ffc7e3f1131f7e1fc34 ...) createReleaseTag.dir(reportsDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=478ea41b5d962bcf40964ffc7e3f1131f7e1fc34}], userRemoteConfigs=[{url=https://github.com/opensearch-project/dashboards-reports.git}]}) @@ -114,8 +114,8 @@ createReleaseTag.sh(git tag 2.0.0.0-rc1) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-reports.git 2.0.0.0-rc1, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-reports.git 2.0.0.0-rc1) - BuildManifest.getCommitId(observabilityDashboards) - BuildManifest.getRepo(observabilityDashboards) + BundleManifest.getCommitId(observabilityDashboards) + BundleManifest.getRepo(observabilityDashboards) createReleaseTag.echo(Tagging observabilityDashboards at 52ae188b9a38e96f1d666f51e56ed92998c1d745 ...) createReleaseTag.dir(observabilityDashboards, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=52ae188b9a38e96f1d666f51e56ed92998c1d745}], userRemoteConfigs=[{url=https://github.com/opensearch-project/observability.git}]}) diff --git a/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag.jenkinsfile.txt index 9041d6d34a..3245fd5cf5 100644 --- a/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/release-tag/release-tag.jenkinsfile.txt @@ -12,20 +12,20 @@ createReleaseTag.legacySCM(groovy.lang.Closure) createReleaseTag.library({identifier=jenkins@20211123, retriever=null}) createReleaseTag.readYaml({file=tests/data/opensearch-build-1.1.0.yml}) - BuildManifest.asBoolean() - BuildManifest.getNames() + BundleManifest.asBoolean() + BundleManifest.getNames() createReleaseTag.echo(Creating 1.1.0 release tag for 15 components in the manifest) createReleaseTag.usernamePassword({credentialsId=dummy_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) createReleaseTag.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - BuildManifest.getCommitId(index-management) - BuildManifest.getRepo(index-management) + BundleManifest.getCommitId(index-management) + BundleManifest.getRepo(index-management) createReleaseTag.echo(Tagging index-management at 7897e9ae9cd5b49535e6a8bbf4c2f73cb458af24 ...) 
createReleaseTag.dir(index-management, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=7897e9ae9cd5b49535e6a8bbf4c2f73cb458af24}], userRemoteConfigs=[{url=https://github.com/opensearch-project/index-management.git}]}) createReleaseTag.sh({script=git ls-remote --tags https://github.com/opensearch-project/index-management.git 1.1.0.0 | awk 'NR==1{print $1}', returnStdout=true}) createReleaseTag.echo(Tag 1.1.0.0 has been created with identical commit ID. Skipping creating new tag for index-management.) - BuildManifest.getCommitId(job-scheduler) - BuildManifest.getRepo(job-scheduler) + BundleManifest.getCommitId(job-scheduler) + BundleManifest.getRepo(job-scheduler) createReleaseTag.echo(Tagging job-scheduler at 4504dabfc67dd5628c1451e91e9a1c3c4ca71525 ...) createReleaseTag.dir(job-scheduler, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=4504dabfc67dd5628c1451e91e9a1c3c4ca71525}], userRemoteConfigs=[{url=https://github.com/opensearch-project/job-scheduler.git}]}) @@ -34,8 +34,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/job-scheduler.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/job-scheduler.git 1.1.0.0) - BuildManifest.getCommitId(anomaly-detection) - BuildManifest.getRepo(anomaly-detection) + BundleManifest.getCommitId(anomaly-detection) + BundleManifest.getRepo(anomaly-detection) createReleaseTag.echo(Tagging anomaly-detection at bedc5b620384163abe272e913705fa23cfd3b3a3 ...) createReleaseTag.dir(anomaly-detection, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=bedc5b620384163abe272e913705fa23cfd3b3a3}], userRemoteConfigs=[{url=https://github.com/opensearch-project/anomaly-detection.git}]}) @@ -44,8 +44,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/anomaly-detection.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/anomaly-detection.git 1.1.0.0) - BuildManifest.getCommitId(performance-analyzer) - BuildManifest.getRepo(performance-analyzer) + BundleManifest.getCommitId(performance-analyzer) + BundleManifest.getRepo(performance-analyzer) createReleaseTag.echo(Tagging performance-analyzer at f184f0bc39302ac38af2585c663d619048f6cffe ...) createReleaseTag.dir(performance-analyzer, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=f184f0bc39302ac38af2585c663d619048f6cffe}], userRemoteConfigs=[{url=https://github.com/opensearch-project/performance-analyzer.git}]}) @@ -54,8 +54,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/performance-analyzer.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/performance-analyzer.git 1.1.0.0) - BuildManifest.getCommitId(common-utils) - BuildManifest.getRepo(common-utils) + BundleManifest.getCommitId(common-utils) + BundleManifest.getRepo(common-utils) createReleaseTag.echo(Tagging common-utils at 3913d7097934cbfe1fdcf919347f22a597d00b76 ...) 
createReleaseTag.dir(common-utils, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=3913d7097934cbfe1fdcf919347f22a597d00b76}], userRemoteConfigs=[{url=https://github.com/opensearch-project/common-utils.git}]}) @@ -64,8 +64,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/common-utils.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/common-utils.git 1.1.0.0) - BuildManifest.getCommitId(asynchronous-search) - BuildManifest.getRepo(asynchronous-search) + BundleManifest.getCommitId(asynchronous-search) + BundleManifest.getRepo(asynchronous-search) createReleaseTag.echo(Tagging asynchronous-search at aa344cc1ecdf9ad21d6cb8d9e368361e6a0f3132 ...) createReleaseTag.dir(asynchronous-search, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=aa344cc1ecdf9ad21d6cb8d9e368361e6a0f3132}], userRemoteConfigs=[{url=https://github.com/opensearch-project/asynchronous-search.git}]}) @@ -74,8 +74,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/asynchronous-search.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/asynchronous-search.git 1.1.0.0) - BuildManifest.getCommitId(dashboards-notebooks) - BuildManifest.getRepo(dashboards-notebooks) + BundleManifest.getCommitId(dashboards-notebooks) + BundleManifest.getRepo(dashboards-notebooks) createReleaseTag.echo(Tagging dashboards-notebooks at 5a996eb8ec8c92e7469cd5e5f87b237352f60f61 ...) createReleaseTag.dir(dashboards-notebooks, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=5a996eb8ec8c92e7469cd5e5f87b237352f60f61}], userRemoteConfigs=[{url=https://github.com/opensearch-project/dashboards-notebooks.git}]}) @@ -84,8 +84,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-notebooks.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-notebooks.git 1.1.0.0) - BuildManifest.getCommitId(OpenSearch) - BuildManifest.getRepo(OpenSearch) + BundleManifest.getCommitId(OpenSearch) + BundleManifest.getRepo(OpenSearch) createReleaseTag.echo(Tagging OpenSearch at b7334f49d530ffd1a3f7bd0e5832b9b2a9caa583 ...) createReleaseTag.dir(OpenSearch, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=b7334f49d530ffd1a3f7bd0e5832b9b2a9caa583}], userRemoteConfigs=[{url=https://github.com/opensearch-project/OpenSearch.git}]}) @@ -94,8 +94,8 @@ createReleaseTag.sh(git tag 1.1.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/OpenSearch.git 1.1.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/OpenSearch.git 1.1.0) - BuildManifest.getCommitId(sql) - BuildManifest.getRepo(sql) + BundleManifest.getCommitId(sql) + BundleManifest.getRepo(sql) createReleaseTag.echo(Tagging sql at d68547d585092af1e053d01e1b834259723cd304 ...) 
createReleaseTag.dir(sql, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=d68547d585092af1e053d01e1b834259723cd304}], userRemoteConfigs=[{url=https://github.com/opensearch-project/sql.git}]}) @@ -104,8 +104,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/sql.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/sql.git 1.1.0.0) - BuildManifest.getCommitId(alerting) - BuildManifest.getRepo(alerting) + BundleManifest.getCommitId(alerting) + BundleManifest.getRepo(alerting) createReleaseTag.echo(Tagging alerting at 8024b8b9195f837e49e5bebd7f4a31dfc333eb4d ...) createReleaseTag.dir(alerting, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=8024b8b9195f837e49e5bebd7f4a31dfc333eb4d}], userRemoteConfigs=[{url=https://github.com/opensearch-project/alerting.git}]}) @@ -114,8 +114,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/alerting.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/alerting.git 1.1.0.0) - BuildManifest.getCommitId(security) - BuildManifest.getRepo(security) + BundleManifest.getCommitId(security) + BundleManifest.getRepo(security) createReleaseTag.echo(Tagging security at 534fffe0e6cf2b33b9abcbc6508e98fc2d077a3d ...) createReleaseTag.dir(security, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=534fffe0e6cf2b33b9abcbc6508e98fc2d077a3d}], userRemoteConfigs=[{url=https://github.com/opensearch-project/security.git}]}) @@ -124,8 +124,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/security.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/security.git 1.1.0.0) - BuildManifest.getCommitId(k-NN) - BuildManifest.getRepo(k-NN) + BundleManifest.getCommitId(k-NN) + BundleManifest.getRepo(k-NN) createReleaseTag.echo(Tagging k-NN at 6a3fdcafd75e63521bcf7893ce908642e1d9fcb6 ...) createReleaseTag.dir(k-NN, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=6a3fdcafd75e63521bcf7893ce908642e1d9fcb6}], userRemoteConfigs=[{url=https://github.com/opensearch-project/k-NN.git}]}) @@ -134,8 +134,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/k-NN.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/k-NN.git 1.1.0.0) - BuildManifest.getCommitId(dashboards-reports) - BuildManifest.getRepo(dashboards-reports) + BundleManifest.getCommitId(dashboards-reports) + BundleManifest.getRepo(dashboards-reports) createReleaseTag.echo(Tagging dashboards-reports at 622f334b0724e47f7ffd21cf7e7d521a9f6c949e ...) 
createReleaseTag.dir(dashboards-reports, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=622f334b0724e47f7ffd21cf7e7d521a9f6c949e}], userRemoteConfigs=[{url=https://github.com/opensearch-project/dashboards-reports.git}]}) @@ -144,8 +144,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-reports.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-reports.git 1.1.0.0) - BuildManifest.getCommitId(notifications) - BuildManifest.getRepo(notifications) + BundleManifest.getCommitId(notifications) + BundleManifest.getRepo(notifications) createReleaseTag.echo(Tagging notifications at d0d3e485c4a850f73652a989eeec795b7347fbb6 ...) createReleaseTag.dir(notifications, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=d0d3e485c4a850f73652a989eeec795b7347fbb6}], userRemoteConfigs=[{url=https://github.com/opensearch-project/notifications.git}]}) @@ -154,8 +154,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/notifications.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/notifications.git 1.1.0.0) - BuildManifest.getCommitId(performance-analyzer-rca) - BuildManifest.getRepo(performance-analyzer-rca) + BundleManifest.getCommitId(performance-analyzer-rca) + BundleManifest.getRepo(performance-analyzer-rca) createReleaseTag.echo(Tagging performance-analyzer-rca at 345a10fd4f4e94d6392c925ad95503ba8addd152 ...) createReleaseTag.dir(performance-analyzer-rca, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=345a10fd4f4e94d6392c925ad95503ba8addd152}], userRemoteConfigs=[{url=https://github.com/opensearch-project/performance-analyzer-rca.git}]}) diff --git a/tests/jenkins/jenkinsjob-regression-files/sign-standalone-artifacts/sign-standalone-artifacts.jenkinsfile.txt b/tests/jenkins/jenkinsjob-regression-files/sign-standalone-artifacts/sign-standalone-artifacts.jenkinsfile.txt index 1eec75e275..8bac43d2d3 100644 --- a/tests/jenkins/jenkinsjob-regression-files/sign-standalone-artifacts/sign-standalone-artifacts.jenkinsfile.txt +++ b/tests/jenkins/jenkinsjob-regression-files/sign-standalone-artifacts/sign-standalone-artifacts.jenkinsfile.txt @@ -2,6 +2,7 @@ sign-standalone-artifacts.legacySCM(groovy.lang.Closure) sign-standalone-artifacts.library({identifier=jenkins@20211123, retriever=null}) sign-standalone-artifacts.pipeline(groovy.lang.Closure) + sign-standalone-artifacts.credentials(jenkins-artifact-bucket-name) sign-standalone-artifacts.echo(Executing on agent [docker:[image:opensearchstaging/ci-runner:ci-runner-rockylinux8-opensearch-build-v2, reuseNode:false, stages:[:], args:, alwaysPull:true, containerPerStageRoot:false, label:Jenkins-Agent-al2-x64-c54xlarge-Docker-Host]]) sign-standalone-artifacts.stage(sign, groovy.lang.Closure) sign-standalone-artifacts.script(groovy.lang.Closure) @@ -9,22 +10,26 @@ sign-standalone-artifacts.sh(curl -SL https://www.dummy.com/dummy_1_artifact.tar.gz -o /tmp/workspace/artifacts/dummy_1_artifact.tar.gz) sign-standalone-artifacts.sh(curl -SL https://www.dummy.com/dummy_2_artifact.tar.gz -o /tmp/workspace/artifacts/dummy_2_artifact.tar.gz) sign-standalone-artifacts.signArtifacts({artifactPath=/tmp/workspace/artifacts, sigtype=.sig, platform=linux}) - signArtifacts.echo(PGP 
Signature Signing) + signArtifacts.echo(PGP or Windows Signature Signing) signArtifacts.fileExists(/tmp/workspace/sign.sh) signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/artifacts --sigtype=.sig --platform=linux - ) + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/artifacts --sigtype=.sig --platform=linux + ) sign-standalone-artifacts.uploadToS3({sourcePath=/tmp/workspace/artifacts, bucket=dummy_bucket_name, path=sign_artifacts_job/dummy/upload/path/20/dist/signed}) uploadToS3.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) uploadToS3.withCredentials([AWS_ACCOUNT_PUBLIC], groovy.lang.Closure) diff --git a/tests/jenkins/jobs/AssembleManifest_rpm_Jenkinsfile.txt b/tests/jenkins/jobs/AssembleManifest_rpm_Jenkinsfile.txt index 4490d61a10..56db0bc8bf 100644 --- a/tests/jenkins/jobs/AssembleManifest_rpm_Jenkinsfile.txt +++ b/tests/jenkins/jobs/AssembleManifest_rpm_Jenkinsfile.txt @@ -13,11 +13,11 @@ BuildManifest.getArtifactRootUrlWithoutDistribution(https://ci.opensearch.org/dbc, vars-build, 123) assembleManifest.sh(./assemble.sh "tests/data/opensearch-build-1.3.0-rpm.yml" --base-url https://ci.opensearch.org/dbc/vars-build/1.3.0/123/linux/x64) assembleManifest.signArtifacts({artifactPath=rpm/dist/opensearch, sigtype=.rpm, platform=linux}) - signArtifacts.echo(RPM Add Sign) - signArtifacts.withAWS({role=sign_asm_role, roleAccount=sign_asm_account, duration=900, roleSessionName=jenkins-signing-session}, groovy.lang.Closure) - signArtifacts.string({credentialsId=jenkins-rpm-signing-asm-pass-id, variable=SIGNING_PASS_ID}) - signArtifacts.string({credentialsId=jenkins-rpm-signing-asm-secret-id, variable=SIGNING_SECRET_ID}) - signArtifacts.withCredentials([SIGNING_PASS_ID, SIGNING_SECRET_ID], groovy.lang.Closure) + signArtifacts.string({credentialsId=jenkins-rpm-signing-props, variable=configs}) + signArtifacts.withCredentials([configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.echo(RPM Add Sign) + signArtifacts.withAWS({role=jenki-jenki-asm-assume-role, roleAccount=1234, duration=900, roleSessionName=jenkins-signing-session}, groovy.lang.Closure) signArtifacts.sh( set -e set +x @@ -57,8 +57,8 @@ echo "------------------------------------------------------------------------" echo "Import OpenSearch keys" - aws secretsmanager get-secret-value --region "sign_asm_region" --secret-id "SIGNING_PASS_ID" | jq -r .SecretBinary | base64 --decode > passphrase - aws 
secretsmanager get-secret-value --region "sign_asm_region" --secret-id "SIGNING_SECRET_ID" | jq -r .SecretBinary | base64 --decode | gpg --quiet --import --pinentry-mode loopback --passphrase-file passphrase - + aws secretsmanager get-secret-value --region us-west-2 --secret-id "ARN::123456" | jq -r .SecretBinary | base64 --decode > passphrase + aws secretsmanager get-secret-value --region us-west-2 --secret-id "ARN::56789" | jq -r .SecretBinary | base64 --decode | gpg --quiet --import --pinentry-mode loopback --passphrase-file passphrase - echo "------------------------------------------------------------------------" echo "Start Signing Rpm" @@ -85,8 +85,8 @@ echo "------------------------------------------------------------------------" echo "Clean up gpg" - gpg --batch --yes --delete-secret-keys sign_asm_keyid - gpg --batch --yes --delete-keys sign_asm_keyid + gpg --batch --yes --delete-secret-keys abcd1234 + gpg --batch --yes --delete-keys abcd1234 rm -v passphrase ) diff --git a/tests/jenkins/jobs/BuildUploadManifestSHA_Jenkinsfile.txt b/tests/jenkins/jobs/BuildUploadManifestSHA_Jenkinsfile.txt index bffbe238c1..c163c31312 100644 --- a/tests/jenkins/jobs/BuildUploadManifestSHA_Jenkinsfile.txt +++ b/tests/jenkins/jobs/BuildUploadManifestSHA_Jenkinsfile.txt @@ -18,8 +18,14 @@ InputManifest.getSHAsRoot(get-manifest-sha-build) getManifestSHA.echo(Manifest lock: tests/jenkins/data/opensearch-1.3.0.yml.lock) getManifestSHA.echo(Manifest SHA path: get-manifest-sha-build/1.3.0/shas/sha1.yml) - getManifestSHA.withAWS({role=opensearch-bundle, roleAccount=account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - getManifestSHA.s3DoesObjectExist({bucket=artifact-bucket, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) + getManifestSHA.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + getManifestSHA.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + getManifestSHA.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + getManifestSHA.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + getManifestSHA.s3DoesObjectExist({bucket=ARTIFACT_BUCKET_NAME, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) getManifestSHA.echo(Manifest SHA exists: false) - buildUploadManifestSHA.withAWS({role=opensearch-bundle, roleAccount=account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - buildUploadManifestSHA.s3Upload({bucket=artifact-bucket, file=tests/jenkins/data/opensearch-1.3.0.yml.lock, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) + buildUploadManifestSHA.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + buildUploadManifestSHA.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + buildUploadManifestSHA.withCredentials([ARTIFACT_BUCKET_NAME, AWS_ACCOUNT_PUBLIC], groovy.lang.Closure) + buildUploadManifestSHA.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + buildUploadManifestSHA.s3Upload({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/data/opensearch-1.3.0.yml.lock, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) diff --git a/tests/jenkins/jobs/CreateReleaseTag_Jenkinsfile.txt b/tests/jenkins/jobs/CreateReleaseTag_Jenkinsfile.txt index 6f87b91eb6..cbf5415cc4 100644 --- a/tests/jenkins/jobs/CreateReleaseTag_Jenkinsfile.txt +++ 
b/tests/jenkins/jobs/CreateReleaseTag_Jenkinsfile.txt @@ -7,20 +7,20 @@ createReleaseTag.legacySCM(groovy.lang.Closure) createReleaseTag.library({identifier=jenkins@20211123, retriever=null}) createReleaseTag.readYaml({file=tests/data/opensearch-build-1.1.0.yml}) - BuildManifest.asBoolean() - BuildManifest.getNames() + BundleManifest.asBoolean() + BundleManifest.getNames() createReleaseTag.echo(Creating 1.1.0 release tag for 15 components in the manifest) createReleaseTag.usernamePassword({credentialsId=dummy_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) createReleaseTag.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - BuildManifest.getCommitId(index-management) - BuildManifest.getRepo(index-management) + BundleManifest.getCommitId(index-management) + BundleManifest.getRepo(index-management) createReleaseTag.echo(Tagging index-management at 7897e9ae9cd5b49535e6a8bbf4c2f73cb458af24 ...) createReleaseTag.dir(index-management, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=7897e9ae9cd5b49535e6a8bbf4c2f73cb458af24}], userRemoteConfigs=[{url=https://github.com/opensearch-project/index-management.git}]}) createReleaseTag.sh({script=git ls-remote --tags https://github.com/opensearch-project/index-management.git 1.1.0.0 | awk 'NR==1{print $1}', returnStdout=true}) createReleaseTag.echo(Tag 1.1.0.0 has been created with identical commit ID. Skipping creating new tag for index-management.) - BuildManifest.getCommitId(job-scheduler) - BuildManifest.getRepo(job-scheduler) + BundleManifest.getCommitId(job-scheduler) + BundleManifest.getRepo(job-scheduler) createReleaseTag.echo(Tagging job-scheduler at 4504dabfc67dd5628c1451e91e9a1c3c4ca71525 ...) createReleaseTag.dir(job-scheduler, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=4504dabfc67dd5628c1451e91e9a1c3c4ca71525}], userRemoteConfigs=[{url=https://github.com/opensearch-project/job-scheduler.git}]}) @@ -29,8 +29,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/job-scheduler.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/job-scheduler.git 1.1.0.0) - BuildManifest.getCommitId(anomaly-detection) - BuildManifest.getRepo(anomaly-detection) + BundleManifest.getCommitId(anomaly-detection) + BundleManifest.getRepo(anomaly-detection) createReleaseTag.echo(Tagging anomaly-detection at bedc5b620384163abe272e913705fa23cfd3b3a3 ...) createReleaseTag.dir(anomaly-detection, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=bedc5b620384163abe272e913705fa23cfd3b3a3}], userRemoteConfigs=[{url=https://github.com/opensearch-project/anomaly-detection.git}]}) @@ -39,8 +39,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/anomaly-detection.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/anomaly-detection.git 1.1.0.0) - BuildManifest.getCommitId(performance-analyzer) - BuildManifest.getRepo(performance-analyzer) + BundleManifest.getCommitId(performance-analyzer) + BundleManifest.getRepo(performance-analyzer) createReleaseTag.echo(Tagging performance-analyzer at f184f0bc39302ac38af2585c663d619048f6cffe ...) 
createReleaseTag.dir(performance-analyzer, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=f184f0bc39302ac38af2585c663d619048f6cffe}], userRemoteConfigs=[{url=https://github.com/opensearch-project/performance-analyzer.git}]}) @@ -49,8 +49,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/performance-analyzer.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/performance-analyzer.git 1.1.0.0) - BuildManifest.getCommitId(common-utils) - BuildManifest.getRepo(common-utils) + BundleManifest.getCommitId(common-utils) + BundleManifest.getRepo(common-utils) createReleaseTag.echo(Tagging common-utils at 3913d7097934cbfe1fdcf919347f22a597d00b76 ...) createReleaseTag.dir(common-utils, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=3913d7097934cbfe1fdcf919347f22a597d00b76}], userRemoteConfigs=[{url=https://github.com/opensearch-project/common-utils.git}]}) @@ -59,8 +59,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/common-utils.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/common-utils.git 1.1.0.0) - BuildManifest.getCommitId(asynchronous-search) - BuildManifest.getRepo(asynchronous-search) + BundleManifest.getCommitId(asynchronous-search) + BundleManifest.getRepo(asynchronous-search) createReleaseTag.echo(Tagging asynchronous-search at aa344cc1ecdf9ad21d6cb8d9e368361e6a0f3132 ...) createReleaseTag.dir(asynchronous-search, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=aa344cc1ecdf9ad21d6cb8d9e368361e6a0f3132}], userRemoteConfigs=[{url=https://github.com/opensearch-project/asynchronous-search.git}]}) @@ -69,8 +69,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/asynchronous-search.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/asynchronous-search.git 1.1.0.0) - BuildManifest.getCommitId(dashboards-notebooks) - BuildManifest.getRepo(dashboards-notebooks) + BundleManifest.getCommitId(dashboards-notebooks) + BundleManifest.getRepo(dashboards-notebooks) createReleaseTag.echo(Tagging dashboards-notebooks at 5a996eb8ec8c92e7469cd5e5f87b237352f60f61 ...) createReleaseTag.dir(dashboards-notebooks, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=5a996eb8ec8c92e7469cd5e5f87b237352f60f61}], userRemoteConfigs=[{url=https://github.com/opensearch-project/dashboards-notebooks.git}]}) @@ -79,8 +79,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-notebooks.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-notebooks.git 1.1.0.0) - BuildManifest.getCommitId(OpenSearch) - BuildManifest.getRepo(OpenSearch) + BundleManifest.getCommitId(OpenSearch) + BundleManifest.getRepo(OpenSearch) createReleaseTag.echo(Tagging OpenSearch at b7334f49d530ffd1a3f7bd0e5832b9b2a9caa583 ...) 
createReleaseTag.dir(OpenSearch, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=b7334f49d530ffd1a3f7bd0e5832b9b2a9caa583}], userRemoteConfigs=[{url=https://github.com/opensearch-project/OpenSearch.git}]}) @@ -89,8 +89,8 @@ createReleaseTag.sh(git tag 1.1.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/OpenSearch.git 1.1.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/OpenSearch.git 1.1.0) - BuildManifest.getCommitId(sql) - BuildManifest.getRepo(sql) + BundleManifest.getCommitId(sql) + BundleManifest.getRepo(sql) createReleaseTag.echo(Tagging sql at d68547d585092af1e053d01e1b834259723cd304 ...) createReleaseTag.dir(sql, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=d68547d585092af1e053d01e1b834259723cd304}], userRemoteConfigs=[{url=https://github.com/opensearch-project/sql.git}]}) @@ -99,8 +99,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/sql.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/sql.git 1.1.0.0) - BuildManifest.getCommitId(alerting) - BuildManifest.getRepo(alerting) + BundleManifest.getCommitId(alerting) + BundleManifest.getRepo(alerting) createReleaseTag.echo(Tagging alerting at 8024b8b9195f837e49e5bebd7f4a31dfc333eb4d ...) createReleaseTag.dir(alerting, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=8024b8b9195f837e49e5bebd7f4a31dfc333eb4d}], userRemoteConfigs=[{url=https://github.com/opensearch-project/alerting.git}]}) @@ -109,8 +109,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/alerting.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/alerting.git 1.1.0.0) - BuildManifest.getCommitId(security) - BuildManifest.getRepo(security) + BundleManifest.getCommitId(security) + BundleManifest.getRepo(security) createReleaseTag.echo(Tagging security at 534fffe0e6cf2b33b9abcbc6508e98fc2d077a3d ...) createReleaseTag.dir(security, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=534fffe0e6cf2b33b9abcbc6508e98fc2d077a3d}], userRemoteConfigs=[{url=https://github.com/opensearch-project/security.git}]}) @@ -119,8 +119,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/security.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/security.git 1.1.0.0) - BuildManifest.getCommitId(k-NN) - BuildManifest.getRepo(k-NN) + BundleManifest.getCommitId(k-NN) + BundleManifest.getRepo(k-NN) createReleaseTag.echo(Tagging k-NN at 6a3fdcafd75e63521bcf7893ce908642e1d9fcb6 ...) 
createReleaseTag.dir(k-NN, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=6a3fdcafd75e63521bcf7893ce908642e1d9fcb6}], userRemoteConfigs=[{url=https://github.com/opensearch-project/k-NN.git}]}) @@ -129,8 +129,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/k-NN.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/k-NN.git 1.1.0.0) - BuildManifest.getCommitId(dashboards-reports) - BuildManifest.getRepo(dashboards-reports) + BundleManifest.getCommitId(dashboards-reports) + BundleManifest.getRepo(dashboards-reports) createReleaseTag.echo(Tagging dashboards-reports at 622f334b0724e47f7ffd21cf7e7d521a9f6c949e ...) createReleaseTag.dir(dashboards-reports, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=622f334b0724e47f7ffd21cf7e7d521a9f6c949e}], userRemoteConfigs=[{url=https://github.com/opensearch-project/dashboards-reports.git}]}) @@ -139,8 +139,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-reports.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/dashboards-reports.git 1.1.0.0) - BuildManifest.getCommitId(notifications) - BuildManifest.getRepo(notifications) + BundleManifest.getCommitId(notifications) + BundleManifest.getRepo(notifications) createReleaseTag.echo(Tagging notifications at d0d3e485c4a850f73652a989eeec795b7347fbb6 ...) createReleaseTag.dir(notifications, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=d0d3e485c4a850f73652a989eeec795b7347fbb6}], userRemoteConfigs=[{url=https://github.com/opensearch-project/notifications.git}]}) @@ -149,8 +149,8 @@ createReleaseTag.sh(git tag 1.1.0.0) createReleaseTag.sh({script=git push https://GITHUB_TOKEN@github.com/opensearch-project/notifications.git 1.1.0.0, returnStatus=true}) createReleaseTag.sh(git push https://GITHUB_TOKEN@github.com/opensearch-project/notifications.git 1.1.0.0) - BuildManifest.getCommitId(performance-analyzer-rca) - BuildManifest.getRepo(performance-analyzer-rca) + BundleManifest.getCommitId(performance-analyzer-rca) + BundleManifest.getRepo(performance-analyzer-rca) createReleaseTag.echo(Tagging performance-analyzer-rca at 345a10fd4f4e94d6392c925ad95503ba8addd152 ...) 
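Illustrative aside (not part of the diff hunks above): the CreateReleaseTag fixture only renames the manifest helper from BuildManifest to BundleManifest; the recorded per-component flow is unchanged. A rough Groovy sketch of that flow, inferred purely from the recorded calls — the class name, argument shapes, and the tagVersion/pushUrl placeholders are assumptions, not the library source:

  // Hedged sketch of the per-component tagging loop seen in the recording above.
  def manifest = new BundleManifest(readYaml(file: 'tests/data/opensearch-build-1.1.0.yml'))
  manifest.getNames().each { component ->
      def commitId = manifest.getCommitId(component)
      def repo = manifest.getRepo(component)
      def tagVersion = '1.1.0.0'   // 1.1.0 for OpenSearch core, 1.1.0.0 for plugins in this recording
      def pushUrl = repo           // stand-in; the recording pushes a GITHUB_TOKEN-authenticated URL
      echo "Tagging ${component} at ${commitId} ..."
      dir(component) {
          checkout([$class: 'GitSCM', branches: [[name: commitId]],
                    userRemoteConfigs: [[url: repo]]])
          // Skip when the tag already exists and points at the same commit as the manifest.
          def existing = sh(script: "git ls-remote --tags ${repo} ${tagVersion} | awk 'NR==1{print \$1}'",
                            returnStdout: true).trim()
          if (existing == commitId) {
              echo "Tag ${tagVersion} has been created with identical commit ID. Skipping creating new tag for ${component}."
          } else {
              sh "git tag ${tagVersion}"
              sh "git push ${pushUrl} ${tagVersion}"
          }
      }
  }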
createReleaseTag.dir(performance-analyzer-rca, groovy.lang.Closure) createReleaseTag.checkout({$class=GitSCM, branches=[{name=345a10fd4f4e94d6392c925ad95503ba8addd152}], userRemoteConfigs=[{url=https://github.com/opensearch-project/performance-analyzer-rca.git}]}) diff --git a/tests/jenkins/jobs/DownloadFromS3_Jenkinsfile.txt b/tests/jenkins/jobs/DownloadFromS3_Jenkinsfile.txt index 992c7ff82c..d8eb6c1c83 100644 --- a/tests/jenkins/jobs/DownloadFromS3_Jenkinsfile.txt +++ b/tests/jenkins/jobs/DownloadFromS3_Jenkinsfile.txt @@ -4,5 +4,7 @@ DownloadFromS3_Jenkinsfile.stage(download, groovy.lang.Closure) DownloadFromS3_Jenkinsfile.script(groovy.lang.Closure) DownloadFromS3_Jenkinsfile.downloadFromS3({destPath=/tmp/src/path, bucket=dummy_bucket, path=/download/path, force=true}) - downloadFromS3.withAWS({role=Dummy_Download_Role, roleAccount=dummy_account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - downloadFromS3.s3Download({file=/tmp/src/path, bucket=dummy_bucket, path=/download/path, force=true}) + downloadFromS3.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + downloadFromS3.withCredentials([AWS_ACCOUNT_PUBLIC], groovy.lang.Closure) + downloadFromS3.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + downloadFromS3.s3Download({file=/tmp/src/path, bucket=dummy_bucket, path=/download/path, force=true}) diff --git a/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_does_not_exist.txt b/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_does_not_exist.txt index ac58fb01af..219eadbcc4 100644 --- a/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_does_not_exist.txt +++ b/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_does_not_exist.txt @@ -15,8 +15,11 @@ InputManifest.getSHAsRoot(get-manifest-sha-build) getManifestSHA.echo(Manifest lock: tests/jenkins/data/opensearch-1.3.0.yml.lock) getManifestSHA.echo(Manifest SHA path: get-manifest-sha-build/1.3.0/shas/sha1.yml) - getManifestSHA.withAWS({role=opensearch-bundle, roleAccount=account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - getManifestSHA.s3DoesObjectExist({bucket=artifact-bucket, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) + getManifestSHA.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + getManifestSHA.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + getManifestSHA.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + getManifestSHA.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + getManifestSHA.s3DoesObjectExist({bucket=ARTIFACT_BUCKET_NAME, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) getManifestSHA.echo(Manifest SHA exists: false) GetManifestSHA_Jenkinsfile.echo(sha: sha1) GetManifestSHA_Jenkinsfile.echo(exists: false) diff --git a/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_exists.txt b/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_exists.txt index 81b97f07b5..5a18992ce9 100644 --- a/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_exists.txt +++ b/tests/jenkins/jobs/GetManifestSHA_Jenkinsfile_exists.txt @@ -15,8 +15,11 @@ InputManifest.getSHAsRoot(get-manifest-sha-build) getManifestSHA.echo(Manifest lock: tests/jenkins/data/opensearch-1.3.0.yml.lock) getManifestSHA.echo(Manifest SHA path: get-manifest-sha-build/1.3.0/shas/sha1.yml) - getManifestSHA.withAWS({role=opensearch-bundle, 
roleAccount=account, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - getManifestSHA.s3DoesObjectExist({bucket=artifact-bucket, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) + getManifestSHA.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + getManifestSHA.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + getManifestSHA.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME], groovy.lang.Closure) + getManifestSHA.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + getManifestSHA.s3DoesObjectExist({bucket=ARTIFACT_BUCKET_NAME, path=get-manifest-sha-build/1.3.0/shas/sha1.yml}) getManifestSHA.echo(Manifest SHA exists: true) GetManifestSHA_Jenkinsfile.echo(sha: sha1) GetManifestSHA_Jenkinsfile.echo(exists: true) diff --git a/tests/jenkins/jobs/PromoteArtifactsQualifier_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifactsQualifier_Jenkinsfile.txt index d1f3dfe25c..1a75c7f2b9 100644 --- a/tests/jenkins/jobs/PromoteArtifactsQualifier_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifactsQualifier_Jenkinsfile.txt @@ -8,16 +8,28 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-2.0.0-rc1.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-2.0.0-rc1-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch/, 
includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-2.0.0-rc1-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifactsQualifier_OpenSearch_Dashboards_Jenkinsfile.txt 
b/tests/jenkins/jobs/PromoteArtifactsQualifier_OpenSearch_Dashboards_Jenkinsfile.txt index b15ad19306..d72924bc28 100644 --- a/tests/jenkins/jobs/PromoteArtifactsQualifier_OpenSearch_Dashboards_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifactsQualifier_OpenSearch_Dashboards_Jenkinsfile.txt @@ -8,16 +8,28 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-dashboards-2.0.0-rc1.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-2.0.0-rc1-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) 
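Illustrative aside (not part of the diff hunks above): across the DownloadFromS3, GetManifestSHA, and PromoteArtifacts fixtures, the updated recordings replace hard-coded role, account, and bucket parameters with Jenkins string credentials bound inside withCredentials, after which withAWS assumes the opensearch-bundle role in the public account. A minimal hedged sketch of that pattern, using the credential IDs shown in the recording but with a simplified closure body that is not the actual vars/ implementation:

  // Hedged sketch: resolve account and bucket from credentials, then assume the role for S3 access.
  withCredentials([
      string(credentialsId: 'jenkins-aws-account-public', variable: 'AWS_ACCOUNT_PUBLIC'),
      string(credentialsId: 'jenkins-artifact-bucket-name', variable: 'ARTIFACT_BUCKET_NAME')]) {
      withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}",
              duration: 900, roleSessionName: 'jenkins-session') {
          // Same download as before; only the bucket name now comes from a credential.
          s3Download(bucket: "${ARTIFACT_BUCKET_NAME}", file: 'tests/jenkins/artifacts/tar',
                     path: 'vars-build/2.0.0-rc1/33/linux/x64/tar/', force: true)
      }
  }

The promotion side of these fixtures follows the same shape, with jenkins-artifact-promotion-role, jenkins-aws-production-account, and jenkins-artifact-production-bucket-name feeding the withAWS/s3Upload calls.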
+ promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-2.0.0-rc1-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_Jenkinsfile.txt index 41c2291da9..e032a78a52 100644 --- a/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_Jenkinsfile.txt @@ -12,64 +12,84 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-2.0.0-rc1.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, 
path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.tar*,**/opensearch-2.0.0-rc1*.tar*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.tar*,**/opensearch-2.0.0-rc1*.tar*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-2.0.0-rc1-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.rpm*,**/opensearch-2.0.0-rc1*.rpm*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, 
.zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.rpm*,**/opensearch-2.0.0-rc1*.rpm*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.tar*,**/opensearch-2.0.0-rc1*.tar*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + 
createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.tar*,**/opensearch-2.0.0-rc1*.tar*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-2.0.0-rc1-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.rpm*,**/opensearch-2.0.0-rc1*.rpm*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-min-2.0.0-rc1*.rpm*,**/opensearch-2.0.0-rc1*.rpm*}) + promoteArtifacts.getPath() + 
createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-2.0.0-rc1-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_OpenSearch_Dashboards_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_OpenSearch_Dashboards_Jenkinsfile.txt index 5c4f7880ef..0af3a178bb 100644 --- a/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_OpenSearch_Dashboards_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifactsQualifier_actions_OpenSearch_Dashboards_Jenkinsfile.txt @@ -12,64 +12,84 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-dashboards-2.0.0-rc1.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.tar*,**/opensearch-dashboards-2.0.0-rc1*.tar*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.tar*,**/opensearch-dashboards-2.0.0-rc1*.tar*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, 
artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-2.0.0-rc1-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.rpm*,**/opensearch-dashboards-2.0.0-rc1*.rpm*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.rpm*,**/opensearch-dashboards-2.0.0-rc1*.rpm*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], 
groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/2.0.0-rc1/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.tar*,**/opensearch-dashboards-2.0.0-rc1*.tar*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.tar*,**/opensearch-dashboards-2.0.0-rc1*.tar*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, 
passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-2.0.0-rc1-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/tar/vars-build/2.0.0-rc1/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/2.0.0-rc1/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.rpm*,**/opensearch-dashboards-2.0.0-rc1*.rpm*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-2.0.0-rc1*.rpm*,**/opensearch-dashboards-2.0.0-rc1*.rpm*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], 
groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/2.0.0-rc1/, workingDir=tests/jenkins/artifacts/rpm/vars-build/2.0.0-rc1/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-2.0.0-rc1-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifacts_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifacts_Jenkinsfile.txt index 39a26b8005..8eab96af73 100644 --- a/tests/jenkins/jobs/PromoteArtifacts_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifacts_Jenkinsfile.txt @@ -8,36 +8,49 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-1.3.0.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/discovery-ec2/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-ec2*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/transport-nio/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/transport-nio*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/discovery-gce/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-gce*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-ukrainian/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-ukrainian*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/discovery-azure-classic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-azure-classic*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-phonetic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-phonetic*}) - 
promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/mapper-murmur3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-murmur3*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-kuromoji/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-kuromoji*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-stempel/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-stempel*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/mapper-annotated-text/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-annotated-text*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-hdfs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-hdfs*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-icu/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-icu*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/mapper-size/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-size*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/ingest-attachment/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/ingest-attachment*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-azure/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-azure*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-s3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-s3*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-nori/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-nori*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/store-smb/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/store-smb*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-smartcn/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-smartcn*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-gcs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-gcs*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch/1.3.0/, 
workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-1.3.0-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core Plugins) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/discovery-ec2/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-ec2*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/transport-nio/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/transport-nio*}) + 
promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/discovery-gce/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-gce*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-ukrainian/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-ukrainian*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/discovery-azure-classic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-azure-classic*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-phonetic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-phonetic*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/mapper-murmur3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-murmur3*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-kuromoji/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-kuromoji*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-stempel/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-stempel*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/mapper-annotated-text/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-annotated-text*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-hdfs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-hdfs*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-icu/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-icu*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/mapper-size/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-size*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/ingest-attachment/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/ingest-attachment*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-azure/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-azure*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-s3/1.3.0/, 
workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-s3*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-nori/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-nori*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/store-smb/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/store-smb*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-smartcn/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-smartcn*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-gcs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-gcs*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-1.3.0-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifacts_OpenSearch_Dashboards_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifacts_OpenSearch_Dashboards_Jenkinsfile.txt index d3529d9c6a..8528bfb223 100644 --- a/tests/jenkins/jobs/PromoteArtifacts_OpenSearch_Dashboards_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifacts_OpenSearch_Dashboards_Jenkinsfile.txt @@ -8,16 +8,28 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-dashboards-1.3.0.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - 
promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-1.3.0-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + 
promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-1.3.0-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifacts_actions_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifacts_actions_Jenkinsfile.txt index 717b762199..5a8c7420f8 100644 --- a/tests/jenkins/jobs/PromoteArtifacts_actions_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifacts_actions_Jenkinsfile.txt @@ -12,111 +12,136 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-1.3.0.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins) - createSha512Checksums.sh({script=find tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins -type f, returnStdout=true}) - createSha512Checksums.echo(Creating sha for tar_dummy_artifact_1.3.0.tar.gz) - createSha512Checksums.sh({script=sha512sum tar_dummy_artifact_1.3.0.tar.gz, returnStdout=true}) - createSha512Checksums.sh({script=basename 
tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/tar_dummy_artifact_1.3.0.tar.gz, returnStdout=true}) - createSha512Checksums.writeFile({file=tar_dummy_artifact_1.3.0.tar.gz.sha512, text=shaHashDummy_tar_dummy_artifact_1.3.0.tar.gz tar_dummy_artifact_1.3.0.tar.gz}) - createSha512Checksums.echo(Creating sha for zip_dummy_artifact_1.3.0.zip) - createSha512Checksums.sh({script=sha512sum zip_dummy_artifact_1.3.0.zip, returnStdout=true}) - createSha512Checksums.sh({script=basename tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/zip_dummy_artifact_1.3.0.zip, returnStdout=true}) - createSha512Checksums.writeFile({file=zip_dummy_artifact_1.3.0.zip.sha512, text=shaHashDummy_zip_dummy_artifact_1.3.0.zip zip_dummy_artifact_1.3.0.zip}) - createSha512Checksums.echo(Not generating sha for dummy_artifact_1.3.0.dummy in tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins --sigtype=.sig - ) - promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.tar*,**/opensearch-1.3.0*.tar*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.tar*,**/opensearch-1.3.0*.tar*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export 
UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/discovery-ec2/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-ec2*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/transport-nio/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/transport-nio*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/discovery-gce/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-gce*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-ukrainian/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-ukrainian*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/discovery-azure-classic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-azure-classic*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-phonetic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-phonetic*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/mapper-murmur3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-murmur3*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-kuromoji/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-kuromoji*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-stempel/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-stempel*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/mapper-annotated-text/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-annotated-text*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-hdfs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-hdfs*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-icu/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-icu*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/mapper-size/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, 
includePathPattern=**/mapper-size*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/ingest-attachment/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/ingest-attachment*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-azure/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-azure*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-s3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-s3*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-nori/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-nori*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/store-smb/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/store-smb*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/analysis-smartcn/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-smartcn*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/plugins/repository-gcs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-gcs*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-1.3.0-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.rpm*,**/opensearch-1.3.0*.rpm*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.rpm*,**/opensearch-1.3.0*.rpm*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - 
signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core Plugins) + createSha512Checksums.sh({script=find tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins -type f, returnStdout=true}) + createSha512Checksums.echo(Creating sha for tar_dummy_artifact_1.3.0.tar.gz) + createSha512Checksums.sh({script=sha512sum tar_dummy_artifact_1.3.0.tar.gz, returnStdout=true}) + createSha512Checksums.sh({script=basename tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/tar_dummy_artifact_1.3.0.tar.gz, returnStdout=true}) + createSha512Checksums.writeFile({file=tar_dummy_artifact_1.3.0.tar.gz.sha512, text=shaHashDummy_tar_dummy_artifact_1.3.0.tar.gz tar_dummy_artifact_1.3.0.tar.gz}) + createSha512Checksums.echo(Creating sha for zip_dummy_artifact_1.3.0.zip) + createSha512Checksums.sh({script=sha512sum zip_dummy_artifact_1.3.0.zip, returnStdout=true}) 
+ createSha512Checksums.sh({script=basename tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/zip_dummy_artifact_1.3.0.zip, returnStdout=true}) + createSha512Checksums.writeFile({file=zip_dummy_artifact_1.3.0.zip.sha512, text=shaHashDummy_zip_dummy_artifact_1.3.0.zip zip_dummy_artifact_1.3.0.zip}) + createSha512Checksums.echo(Not generating sha for dummy_artifact_1.3.0.dummy in tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins --sigtype=.sig + ) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.tar*,**/opensearch-1.3.0*.tar*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.tar*,**/opensearch-1.3.0*.tar*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh 
tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/discovery-ec2/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-ec2*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/transport-nio/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/transport-nio*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/discovery-gce/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-gce*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-ukrainian/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-ukrainian*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/discovery-azure-classic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/discovery-azure-classic*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-phonetic/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-phonetic*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/mapper-murmur3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-murmur3*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-kuromoji/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-kuromoji*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-stempel/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-stempel*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/mapper-annotated-text/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-annotated-text*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-hdfs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-hdfs*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-icu/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-icu*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/mapper-size/1.3.0/, 
workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/mapper-size*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/ingest-attachment/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/ingest-attachment*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-azure/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-azure*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-s3/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-s3*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-nori/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-nori*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/store-smb/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/store-smb*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/analysis-smartcn/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/analysis-smartcn*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/plugins/repository-gcs/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/core-plugins/, includePathPattern=**/repository-gcs*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch/dist/, includePathPattern=**/opensearch-min-1.3.0-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.rpm*,**/opensearch-1.3.0*.rpm*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, 
doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-min-1.3.0*.rpm*,**/opensearch-1.3.0*.rpm*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch/, includePathPattern=**/opensearch-1.3.0-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteArtifacts_actions_OpenSearch_Dashboards_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteArtifacts_actions_OpenSearch_Dashboards_Jenkinsfile.txt index 5fbb75ddb5..67219dffcd 100644 --- a/tests/jenkins/jobs/PromoteArtifacts_actions_OpenSearch_Dashboards_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteArtifacts_actions_OpenSearch_Dashboards_Jenkinsfile.txt @@ -12,64 +12,84 @@ promoteArtifacts.library({identifier=jenkins@20211123, retriever=null}) promoteArtifacts.readYaml({file=tests/jenkins/data/opensearch-dashboards-1.3.0.yml}) InputManifest.asBoolean() - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.tar*,**/opensearch-dashboards-1.3.0*.tar*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types 
[.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.tar*,**/opensearch-dashboards-1.3.0*.tar*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/core/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-1.3.0-linux-x64*}) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) - promoteArtifacts.withAWS({role=downloadRoleName, roleAccount=publicAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Download({bucket=artifact-bucket, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) - promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) - promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.rpm*,**/opensearch-dashboards-1.3.0*.rpm*}) - promoteArtifacts.getPath() - createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) - createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) - promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.rpm*,**/opensearch-dashboards-1.3.0*.rpm*}) - promoteArtifacts.getPath() - createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(tests/jenkins/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - 
signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig - ) - promoteArtifacts.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteArtifacts.s3Upload({bucket=prod-bucket-name, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) + promoteArtifacts.string({credentialsId=jenkins-aws-account-public, variable=AWS_ACCOUNT_PUBLIC}) + promoteArtifacts.string({credentialsId=jenkins-artifact-bucket-name, variable=ARTIFACT_BUCKET_NAME}) + promoteArtifacts.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteArtifacts.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteArtifacts.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteArtifacts.withCredentials([AWS_ACCOUNT_PUBLIC, ARTIFACT_BUCKET_NAME, ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteArtifacts.println(S3 download tar artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/tar, path=vars-build/1.3.0/33/linux/x64/tar/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.tar*,**/opensearch-dashboards-1.3.0*.tar*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.tar*,**/opensearch-dashboards-1.3.0*.tar*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL 
https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/core/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/builds/opensearch-dashboards/dist/, includePathPattern=**/opensearch-dashboards-min-1.3.0-linux-x64*}) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/tar/vars-build/1.3.0/33/linux/x64/tar/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) + promoteArtifacts.println(S3 download rpm artifacts before creating signatures) + promoteArtifacts.withAWS({role=opensearch-bundle, roleAccount=AWS_ACCOUNT_PUBLIC, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Download({bucket=ARTIFACT_BUCKET_NAME, file=tests/jenkins/artifacts/rpm, path=vars-build/1.3.0/33/linux/x64/rpm/, force=true}) + promoteArtifacts.readYaml({file=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/manifest.yml}) + promoteArtifacts.fileExists(tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/builds/opensearch-dashboards/core-plugins) + promoteArtifacts.println(Signing Starts) + promoteArtifacts.println(Signing Core/Bundle Artifacts) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.rpm*,**/opensearch-dashboards-1.3.0*.rpm*}) + promoteArtifacts.getPath() + createSha512Checksums.sh({script=find tests/jenkins/tests/jenkins/file/found.zip -type f, returnStdout=true}) + createSha512Checksums.echo(Not generating sha for bbb in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + createSha512Checksums.echo(Not generating sha for ccc in tests/jenkins/tests/jenkins/file/found.zip, doesn't match allowed types [.tar.gz, .zip, .rpm]) + promoteArtifacts.findFiles({glob=**/opensearch-dashboards-min-1.3.0*.rpm*,**/opensearch-dashboards-1.3.0*.rpm*}) + promoteArtifacts.getPath() + createSignatureFiles.signArtifacts({sigtype=.sig, artifactPath=tests/jenkins/tests/jenkins/file/found.zip}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(tests/jenkins/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + 
signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + tests/jenkins/sign.sh tests/jenkins/tests/jenkins/file/found.zip --sigtype=.sig + ) + promoteArtifacts.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteArtifacts.s3Upload({bucket=ARTIFACT_PRODUCTION_BUCKET_NAME, path=releases/bundle/opensearch-dashboards/1.3.0/, workingDir=tests/jenkins/artifacts/rpm/vars-build/1.3.0/33/linux/x64/rpm/dist/opensearch-dashboards/, includePathPattern=**/opensearch-dashboards-1.3.0-linux-x64*}) diff --git a/tests/jenkins/jobs/PromoteYumRepos_Jenkinsfile.txt b/tests/jenkins/jobs/PromoteYumRepos_Jenkinsfile.txt index 4da623bf79..8e8d36939c 100644 --- a/tests/jenkins/jobs/PromoteYumRepos_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PromoteYumRepos_Jenkinsfile.txt @@ -8,75 +8,82 @@ promoteYumRepos.library({identifier=jenkins@20211123, retriever=null}) promoteYumRepos.readYaml({file=tests/jenkins/data/opensearch-1.3.0.yml}) InputManifest.asBoolean() - promoteYumRepos.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteYumRepos.println(Pulling Prod Yumrepo) - promoteYumRepos.sh(aws s3 sync s3://prod-bucket-name/releases/bundle/opensearch/1.x/yum/ /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/ --no-progress) - promoteYumRepos.sh( - set -e - set +x - set +x - - echo "Pulling 1.3.0 rpms" - cd /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum - curl -SLO https://ci.opensearch.org/dbc/opensearch/1.3.0/123/linux/x64/rpm/dist/opensearch/opensearch-1.3.0-linux-x64.rpm - curl -SLO https://ci.opensearch.org/dbc/opensearch/1.3.0/123/linux/arm64/rpm/dist/opensearch/opensearch-1.3.0-linux-arm64.rpm - - ls -l - - rm -vf repodata/repomd.xml.asc - - echo "Update repo metadata" - createrepo --update . 
- - # Rename .xml to .pom for signing - # Please do not add .xml to signer filter - # As maven have many .xml and we do not want to sign them - # This is an outlier case for yum repo only - mv -v repodata/repomd.xml repodata/repomd.pom - - echo "Complete metadata update, awaiting signing repomd.xml" + promoteYumRepos.string({credentialsId=jenkins-artifact-promotion-role, variable=ARTIFACT_PROMOTION_ROLE_NAME}) + promoteYumRepos.string({credentialsId=jenkins-aws-production-account, variable=AWS_ACCOUNT_ARTIFACT}) + promoteYumRepos.string({credentialsId=jenkins-artifact-production-bucket-name, variable=ARTIFACT_PRODUCTION_BUCKET_NAME}) + promoteYumRepos.withCredentials([ARTIFACT_PROMOTION_ROLE_NAME, AWS_ACCOUNT_ARTIFACT, ARTIFACT_PRODUCTION_BUCKET_NAME], groovy.lang.Closure) + promoteYumRepos.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteYumRepos.println(Pulling Prod Yumrepo) + promoteYumRepos.sh(aws s3 sync s3://ARTIFACT_PRODUCTION_BUCKET_NAME/releases/bundle/opensearch/1.x/yum/ /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/ --no-progress) + promoteYumRepos.sh( + set -e + set +x + set +x + + echo "Pulling 1.3.0 rpms" + cd /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum + curl -SLO https://ci.opensearch.org/dbc/opensearch/1.3.0/123/linux/x64/rpm/dist/opensearch/opensearch-1.3.0-linux-x64.rpm + curl -SLO https://ci.opensearch.org/dbc/opensearch/1.3.0/123/linux/arm64/rpm/dist/opensearch/opensearch-1.3.0-linux-arm64.rpm + + ls -l + + rm -vf repodata/repomd.xml.asc + + echo "Update repo metadata" + createrepo --update . + + # Rename .xml to .pom for signing + # Please do not add .xml to signer filter + # As maven have many .xml and we do not want to sign them + # This is an outlier case for yum repo only + mv -v repodata/repomd.xml repodata/repomd.pom + + echo "Complete metadata update, awaiting signing repomd.xml" cd - ) - promoteYumRepos.signArtifacts({artifactPath=/tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/repodata/repomd.pom, sigtype=.sig, platform=linux}) - signArtifacts.echo(PGP Signature Signing) - signArtifacts.fileExists(/tmp/workspace/sign.sh) - signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) - signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) - signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) - signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/repodata/repomd.pom --sigtype=.sig --platform=linux - ) - promoteYumRepos.sh( - set -e - set +x - - cd /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/repodata/ - - ls -l - - mv -v repomd.pom repomd.xml - mv -v repomd.pom.sig repomd.xml.sig - - # This step is required as yum only accept .asc and signing workflow only support .sig - cat repomd.xml.sig | gpg --enarmor | sed 's@ARMORED FILE@SIGNATURE@g' > repomd.xml.asc - - rm -vf repomd.xml.sig - - ls -l - - cd - - + 
promoteYumRepos.signArtifacts({artifactPath=/tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/repodata/repomd.pom, sigtype=.sig, platform=linux}) + signArtifacts.echo(PGP or Windows Signature Signing) + signArtifacts.fileExists(/tmp/workspace/sign.sh) + signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) + signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) + signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.sh( + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/repodata/repomd.pom --sigtype=.sig --platform=linux + ) + promoteYumRepos.sh( + set -e + set +x + + cd /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/repodata/ + + ls -l + + mv -v repomd.pom repomd.xml + mv -v repomd.pom.sig repomd.xml.sig + + # This step is required as yum only accept .asc and signing workflow only support .sig + cat repomd.xml.sig | gpg --enarmor | sed 's@ARMORED FILE@SIGNATURE@g' > repomd.xml.asc + + rm -vf repomd.xml.sig + + ls -l + + cd - ) - promoteYumRepos.withAWS({role=artifactPromotionRole, roleAccount=artifactsAccount, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) - promoteYumRepos.println(Pushing Prod Yumrepo) - promoteYumRepos.sh(aws s3 sync /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/ s3://prod-bucket-name/releases/bundle/opensearch/1.x/yum/ --no-progress) + promoteYumRepos.withAWS({role=ARTIFACT_PROMOTION_ROLE_NAME, roleAccount=AWS_ACCOUNT_ARTIFACT, duration=900, roleSessionName=jenkins-session}, groovy.lang.Closure) + promoteYumRepos.println(Pushing Prod Yumrepo) + promoteYumRepos.sh(aws s3 sync /tmp/workspace/artifacts/releases/bundle/opensearch/1.x/yum/ s3://ARTIFACT_PRODUCTION_BUCKET_NAME/releases/bundle/opensearch/1.x/yum/ --no-progress) diff --git a/tests/jenkins/jobs/PublishNotification_Jenkinsfile b/tests/jenkins/jobs/PublishNotification_Jenkinsfile index 8035188d23..7b70b8d269 100644 --- a/tests/jenkins/jobs/PublishNotification_Jenkinsfile +++ b/tests/jenkins/jobs/PublishNotification_Jenkinsfile @@ -10,7 +10,7 @@ pipeline { message: 'Successful Build', extra: 'extra', manifest: '1.2.0/opensearch-1.2.0.yml', - credentialsId: 'BUILD_NOTICE_WEBHOOK' + credentialsId: 'jenkins-build-notice-webhook' ) }catch (Exception e) { echo 'Exception occurred: ' + e.toString() diff --git a/tests/jenkins/jobs/PublishNotification_Jenkinsfile.txt b/tests/jenkins/jobs/PublishNotification_Jenkinsfile.txt index 241afd26f8..5a7add52e0 100644 --- a/tests/jenkins/jobs/PublishNotification_Jenkinsfile.txt +++ b/tests/jenkins/jobs/PublishNotification_Jenkinsfile.txt @@ -3,8 +3,8 @@ PublishNotification_Jenkinsfile.echo(Executing on agent [label:none]) PublishNotification_Jenkinsfile.stage(notify, groovy.lang.Closure) PublishNotification_Jenkinsfile.script(groovy.lang.Closure) - PublishNotification_Jenkinsfile.publishNotification({icon=:white_check_mark:, message=Successful Build, extra=extra, 
manifest=1.2.0/opensearch-1.2.0.yml, credentialsId=BUILD_NOTICE_WEBHOOK}) - publishNotification.string({credentialsId=BUILD_NOTICE_WEBHOOK, variable=WEBHOOK_URL}) + PublishNotification_Jenkinsfile.publishNotification({icon=:white_check_mark:, message=Successful Build, extra=extra, manifest=1.2.0/opensearch-1.2.0.yml, credentialsId=jenkins-build-notice-webhook}) + publishNotification.string({credentialsId=jenkins-build-notice-webhook, variable=WEBHOOK_URL}) publishNotification.withCredentials([WEBHOOK_URL], groovy.lang.Closure) publishNotification.sh(curl -XPOST --header "Content-Type: application/json" --data '{"result_text":":white_check_mark: JOB_NAME=dummy_job diff --git a/tests/jenkins/jobs/RunGradleCheck_Jenkinsfile.txt b/tests/jenkins/jobs/RunGradleCheck_Jenkinsfile.txt index 8d3524c227..47ab1dc84b 100644 --- a/tests/jenkins/jobs/RunGradleCheck_Jenkinsfile.txt +++ b/tests/jenkins/jobs/RunGradleCheck_Jenkinsfile.txt @@ -10,12 +10,11 @@ runGradleCheck.usernamePassword({credentialsId=jenkins-gradle-check-s3-aws-resources, usernameVariable=amazon_s3_base_path, passwordVariable=amazon_s3_bucket}) runGradleCheck.withCredentials([[amazon_s3_access_key, amazon_s3_secret_key], [amazon_s3_base_path, amazon_s3_bucket]], groovy.lang.Closure) runGradleCheck.sh( + #!/bin/bash set -e set +x - env | grep JAVA | grep HOME - echo "Git clone: https://github.com/opensearch-project/OpenSearch with ref: main" rm -rf search git clone https://github.com/opensearch-project/OpenSearch search @@ -23,9 +22,24 @@ git checkout -f main git rev-parse HEAD - echo "Stop existing gradledaemon" + echo "Get Major Version" + OS_VERSION=`cat buildSrc/version.properties | grep opensearch | cut -d= -f2 | grep -oE '[0-9.]+'` + OS_MAJOR_VERSION=`echo $OS_VERSION | grep -oE '[0-9]+' | head -n 1` + echo "Version: $OS_VERSION, Major Version: $OS_MAJOR_VERSION" + + if [ "$OS_MAJOR_VERSION" -lt 2 ]; then + echo "Using JAVA 11" + export JAVA_HOME=$JAVA11_HOME + else + echo "Using JAVA 17" + export JAVA_HOME=$JAVA17_HOME + fi + + env | grep JAVA | grep HOME + + echo "Gradle clean cache and stop existing gradledaemon" ./gradlew --stop - find ~/.gradle -type f -name "*.lock" -delete + rm -rf ~/.gradle echo "Check existing dockercontainer" docker ps -a @@ -37,9 +51,15 @@ echo "Check docker-compose version" docker-compose version + echo "Check existing processes" + ps -ef | grep [o]pensearch | wc -l + echo "Cleanup existing processes" + kill -9 `ps -ef | grep [o]pensearch | awk '{print $2}'` > /dev/null 2>&1 || echo + ps -ef | grep [o]pensearch | wc -l + echo "Start gradlecheck" GRADLE_CHECK_STATUS=0 - ./gradlew check -Dtests.coverage=true --no-daemon --no-scan || GRADLE_CHECK_STATUS=1 + ./gradlew clean && ./gradlew check -Dtests.coverage=true --no-daemon --no-scan || GRADLE_CHECK_STATUS=1 if [ "$GRADLE_CHECK_STATUS" != 0 ]; then echo Gradle Check Failed! 
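
Note on the runGradleCheck change above: the added shell reads the opensearch version from buildSrc/version.properties, takes its major component, and exports JAVA_HOME from JAVA11_HOME for pre-2.x lines and JAVA17_HOME otherwise. A rough Python sketch of that selection logic, for reference only; the function name and arguments are illustrative and not part of this change:

    import re

    def pick_java_home(version_properties: str, java11_home: str, java17_home: str) -> str:
        # Mirrors the shell added to runGradleCheck: find the opensearch version,
        # take its major component, and use JDK 11 only for pre-2.x lines.
        match = re.search(r"opensearch\s*=\s*.*?([0-9][0-9.]*)", version_properties)
        if match is None:
            raise ValueError("opensearch version not found in version.properties")
        major = int(match.group(1).split(".")[0])
        return java11_home if major < 2 else java17_home
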
diff --git a/tests/jenkins/jobs/SignArtifacts_Jenkinsfile.txt b/tests/jenkins/jobs/SignArtifacts_Jenkinsfile.txt index 6bf5908794..e492e627fa 100644 --- a/tests/jenkins/jobs/SignArtifacts_Jenkinsfile.txt +++ b/tests/jenkins/jobs/SignArtifacts_Jenkinsfile.txt @@ -4,28 +4,32 @@ SignArtifacts_Jenkinsfile.stage(sign, groovy.lang.Closure) SignArtifacts_Jenkinsfile.script(groovy.lang.Closure) SignArtifacts_Jenkinsfile.signArtifacts({artifactPath=/tmp/workspace/artifacts, sigtype=.sig, platform=linux}) - signArtifacts.echo(PGP Signature Signing) + signArtifacts.echo(PGP or Windows Signature Signing) signArtifacts.fileExists(/tmp/workspace/sign.sh) signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/artifacts --sigtype=.sig --platform=linux - ) + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/artifacts --sigtype=.sig --platform=linux + ) SignArtifacts_Jenkinsfile.signArtifacts({artifactPath=/tmp/workspace/artifacts, sigtype=.rpm, platform=linux}) - signArtifacts.echo(RPM Add Sign) - signArtifacts.withAWS({role=sign_asm_role, roleAccount=sign_asm_account, duration=900, roleSessionName=jenkins-signing-session}, groovy.lang.Closure) - signArtifacts.string({credentialsId=jenkins-rpm-signing-asm-pass-id, variable=SIGNING_PASS_ID}) - signArtifacts.string({credentialsId=jenkins-rpm-signing-asm-secret-id, variable=SIGNING_SECRET_ID}) - signArtifacts.withCredentials([SIGNING_PASS_ID, SIGNING_SECRET_ID], groovy.lang.Closure) + signArtifacts.string({credentialsId=jenkins-rpm-signing-props, variable=configs}) + signArtifacts.withCredentials([configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) + signArtifacts.echo(RPM Add Sign) + signArtifacts.withAWS({role=jenki-jenki-asm-assume-role, roleAccount=null, duration=900, roleSessionName=jenkins-signing-session}, groovy.lang.Closure) signArtifacts.sh( set -e set +x @@ -65,8 +69,8 @@ echo "------------------------------------------------------------------------" echo "Import OpenSearch keys" - aws secretsmanager get-secret-value --region "sign_asm_region" --secret-id "SIGNING_PASS_ID" | jq -r .SecretBinary | base64 --decode > passphrase - aws secretsmanager get-secret-value --region "sign_asm_region" --secret-id "SIGNING_SECRET_ID" | jq -r .SecretBinary | base64 --decode | gpg --quiet --import --pinentry-mode loopback --passphrase-file passphrase - + aws secretsmanager get-secret-value --region us-west-2 --secret-id "null" | jq -r .SecretBinary | base64 --decode > passphrase + aws 
secretsmanager get-secret-value --region us-west-2 --secret-id "null" | jq -r .SecretBinary | base64 --decode | gpg --quiet --import --pinentry-mode loopback --passphrase-file passphrase - echo "------------------------------------------------------------------------" echo "Start Signing Rpm" @@ -93,25 +97,29 @@ echo "------------------------------------------------------------------------" echo "Clean up gpg" - gpg --batch --yes --delete-secret-keys sign_asm_keyid - gpg --batch --yes --delete-keys sign_asm_keyid + gpg --batch --yes --delete-secret-keys null + gpg --batch --yes --delete-keys null rm -v passphrase ) SignArtifacts_Jenkinsfile.signArtifacts({artifactPath=/tmp/workspace/file.yml, platform=linux, type=maven}) - signArtifacts.echo(PGP Signature Signing) + signArtifacts.echo(PGP or Windows Signature Signing) signArtifacts.fileExists(/tmp/workspace/sign.sh) signArtifacts.git({url=https://github.com/opensearch-project/opensearch-build.git, branch=main}) signArtifacts.sh(curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -) signArtifacts.usernamePassword({credentialsId=github_bot_token_name, usernameVariable=GITHUB_USER, passwordVariable=GITHUB_TOKEN}) - signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN]], groovy.lang.Closure) + signArtifacts.string({credentialsId=signer-pgp-config, variable=configs}) + signArtifacts.withCredentials([[GITHUB_USER, GITHUB_TOKEN], configs], groovy.lang.Closure) + signArtifacts.readJSON({text=configs}) signArtifacts.sh( - #!/bin/bash - set +x - export ROLE=dummy_signer_client_role - export EXTERNAL_ID=signer_client_external_id - export UNSIGNED_BUCKET=signer_client_unsigned_bucket - export SIGNED_BUCKET=signer_client_signed_bucket - - /tmp/workspace/sign.sh /tmp/workspace/file.yml --platform=linux --type=maven - ) + #!/bin/bash + set +x + export ROLE=dummy_role + export EXTERNAL_ID=dummy_ID + export UNSIGNED_BUCKET=dummy_unsigned_bucket + export SIGNED_BUCKET=dummy_signed_bucket + export PROFILE_IDENTIFIER=null + export PLATFORM_IDENTIFIER=null + + /tmp/workspace/sign.sh /tmp/workspace/file.yml --platform=linux --type=maven + ) diff --git a/tests/jenkins/lib-testers/CreateReleaseTagLibTester.groovy b/tests/jenkins/lib-testers/CreateReleaseTagLibTester.groovy index 09087312b1..268532c9fe 100644 --- a/tests/jenkins/lib-testers/CreateReleaseTagLibTester.groovy +++ b/tests/jenkins/lib-testers/CreateReleaseTagLibTester.groovy @@ -1,4 +1,4 @@ -import jenkins.BuildManifest +import jenkins.BundleManifest import org.yaml.snakeyaml.Yaml import static org.hamcrest.CoreMatchers.notNullValue @@ -9,12 +9,12 @@ class CreateReleaseTagLibTester extends LibFunctionTester { private String distManifest private String tagVersion - private ArrayList buildManifestComponentsList + private ArrayList bundleManifestComponentsList public CreateReleaseTagLibTester(distManifest, tagVersion){ this.distManifest = distManifest this.tagVersion = tagVersion - this.buildManifestComponentsList = [] + this.bundleManifestComponentsList = [] } void parameterInvariantsAssertions(call){ @@ -25,7 +25,7 @@ class CreateReleaseTagLibTester extends LibFunctionTester { boolean expectedParametersMatcher(call) { return call.args.distManifest.first().toString().equals(this.distManifest) && call.args.tagVersion.first().toString().equals(this.tagVersion) - && this.buildManifestComponentsList.size() > 1 + && this.bundleManifestComponentsList.size() > 1 } String libFunctionName(){ @@ -42,11 +42,11 @@ class CreateReleaseTagLibTester extends LibFunctionTester { 
InputStream inputStream = new FileInputStream(new File(this.distManifest)); Yaml yaml = new Yaml() Map ymlMap = yaml.load(inputStream) - BuildManifest buildManifestObj = new BuildManifest(ymlMap) - this.buildManifestComponentsList = buildManifestObj.getNames() + BundleManifest bundleManifestObj = new BundleManifest(ymlMap) + this.bundleManifestComponentsList = bundleManifestObj.getNames() boolean checkFirst = true - for (component in this.buildManifestComponentsList) { - def repo = buildManifestObj.getRepo(component) + for (component in this.bundleManifestComponentsList) { + def repo = bundleManifestObj.getRepo(component) def version = tagVersion if (tagVersion.contains("-")) { version = tagVersion.split("-").first() + ".0-" + tagVersion.split("-").last() @@ -58,7 +58,7 @@ class CreateReleaseTagLibTester extends LibFunctionTester { } def out = "" if (checkFirst) { - out = buildManifestObj.getCommitId(component) + out = bundleManifestObj.getCommitId(component) checkFirst = false } helper.addShMock("git ls-remote --tags $repo $version | awk 'NR==1{print \$1}'") { script -> diff --git a/tests/jenkins/lib-testers/SignArtifactsLibTester.groovy b/tests/jenkins/lib-testers/SignArtifactsLibTester.groovy index 40d502246d..d49aec143e 100644 --- a/tests/jenkins/lib-testers/SignArtifactsLibTester.groovy +++ b/tests/jenkins/lib-testers/SignArtifactsLibTester.groovy @@ -1,7 +1,6 @@ import static org.hamcrest.CoreMatchers.notNullValue import static org.hamcrest.MatcherAssert.assertThat - class SignArtifactsLibTester extends LibFunctionTester { private String sigtype @@ -10,7 +9,7 @@ class SignArtifactsLibTester extends LibFunctionTester { private String type private String component - public SignArtifactsLibTester(sigtype, platform, artifactPath, type, component){ + public SignArtifactsLibTester(sigtype, platform, artifactPath, type, component) { this.sigtype = sigtype this.platform = platform this.artifactPath = artifactPath @@ -20,18 +19,28 @@ class SignArtifactsLibTester extends LibFunctionTester { void configure(helper, binding) { binding.setVariable('GITHUB_BOT_TOKEN_NAME', 'github_bot_token_name') - binding.setVariable('SIGNER_CLIENT_ROLE', 'dummy_signer_client_role') - binding.setVariable('SIGNER_CLIENT_EXTERNAL_ID', 'signer_client_external_id') - binding.setVariable('SIGNER_CLIENT_UNSIGNED_BUCKET', 'signer_client_unsigned_bucket') - binding.setVariable('SIGNER_CLIENT_SIGNED_BUCKET', 'signer_client_signed_bucket') - binding.setVariable('SIGN_ASM_ROLE', 'sign_asm_role') - binding.setVariable('SIGN_ASM_ACCOUNT', 'sign_asm_account') - binding.setVariable('SIGN_ASM_REGION', 'sign_asm_region') - binding.setVariable('SIGN_ASM_KEYID', 'sign_asm_keyid') - - helper.registerAllowedMethod("git", [Map]) - helper.registerAllowedMethod("withCredentials", [Map]) - helper.registerAllowedMethod("withAWS", [Map, Closure], { args, closure -> + if (this.sigtype.equals('.rpm')) { + def configs = ['account': '1234', + 'passphrase_secrets_arn': 'ARN::123456', + 'secret_key_id_secrets_arn': 'ARN::56789', + 'key_id': 'abcd1234'] + binding.setVariable('configs', configs) + helper.registerAllowedMethod('readJSON', [Map.class], { c -> configs }) + } + else { + def configs = ["role": "dummy_role", + "external_id": "dummy_ID", + "unsigned_bucket": "dummy_unsigned_bucket", + "signed_bucket": "dummy_signed_bucket"] + binding.setVariable('configs', configs) + helper.registerAllowedMethod('readJSON', [Map.class], { c -> configs }) + } + helper.registerAllowedMethod('git', [Map]) + helper.registerAllowedMethod('withCredentials', 
[Map, Closure], { args, closure -> + closure.delegate = delegate + return helper.callClosure(closure) + }) + helper.registerAllowedMethod('withAWS', [Map, Closure], { args, closure -> closure.delegate = delegate return helper.callClosure(closure) }) @@ -40,15 +49,15 @@ class SignArtifactsLibTester extends LibFunctionTester { void parameterInvariantsAssertions(call) { assertThat(call.args.artifactPath.first(), notNullValue()) assertThat(call.args.platform.first(), notNullValue()) - if(call.args.artifactPath.first().toString().endsWith(".yml")){ + if (call.args.artifactPath.first().toString().endsWith('.yml')) { assertThat(call.args.type.first(), notNullValue()) - } else if(call.args.type.first() != 'maven'){ + } else if (call.args.type.first() != 'maven') { assertThat(call.args.sigtype.first(), notNullValue()) } } boolean expectedParametersMatcher(call) { - if(call.args.artifactPath.first().toString().endsWith(".yml")){ + if (call.args.artifactPath.first().toString().endsWith('.yml')) { return call.args.platform.first().toString().equals(this.platform) && call.args.artifactPath.first().toString().equals(this.artifactPath) && call.args.type.first().toString().equals(this.type) @@ -63,4 +72,5 @@ class SignArtifactsLibTester extends LibFunctionTester { String libFunctionName() { return 'signArtifacts' } + } diff --git a/tests/test_run_sign.py b/tests/test_run_sign.py index 89e3186e74..606a1fea5d 100644 --- a/tests/test_run_sign.py +++ b/tests/test_run_sign.py @@ -33,8 +33,7 @@ def test_usage(self, *mocks: Any) -> None: @patch("argparse._sys.argv", ["run_sign.py", BUILD_MANIFEST]) @patch("run_sign.SignArtifacts") - @patch("run_sign.Signer") - def test_main(self, mock_signer: Mock, mock_sign_artifacts: Mock, *mocks: Any) -> None: + def test_main(self, mock_sign_artifacts: Mock, *mocks: Any) -> None: main() mock_sign_artifacts.from_path.assert_called_once() diff --git a/tests/tests_sign_workflow/test_sign_artifacts.py b/tests/tests_sign_workflow/test_sign_artifacts.py index 0f37d07d1b..5dffa37489 100644 --- a/tests/tests_sign_workflow/test_sign_artifacts.py +++ b/tests/tests_sign_workflow/test_sign_artifacts.py @@ -10,19 +10,20 @@ class TestSignArtifacts(unittest.TestCase): @patch("sign_workflow.signer.GitRepository") - @patch("sign_workflow.signer.Signer", return_value=MagicMock()) + @patch("sign_workflow.signer_pgp.SignerPGP", return_value=MagicMock()) def test_from_path_method(self, mock_signer: Mock, *mocks: Any) -> None: components = ['maven'] artifact_type = 'dummy' sigtype = '.asc' + platform = 'linux' - klass = SignArtifacts.from_path(Path(r"/dummy/path/manifest.yml"), components, artifact_type, sigtype, mock_signer) + klass = SignArtifacts.from_path(Path(r"/dummy/path/manifest.yml"), components, artifact_type, sigtype, platform) self.assertEqual(type(SignWithBuildManifest), type(klass.__class__)) - klass = SignArtifacts.from_path(Path(os.path.dirname(__file__)), components, artifact_type, sigtype, mock_signer) + klass = SignArtifacts.from_path(Path(os.path.dirname(__file__)), components, artifact_type, sigtype, platform) self.assertEqual(type(SignExistingArtifactsDir), type(klass.__class__)) - klass = SignArtifacts.from_path(Path(r"/dummy/path/artifact.tar.gz"), components, artifact_type, sigtype, mock_signer) + klass = SignArtifacts.from_path(Path(r"/dummy/path/artifact.tar.gz"), components, artifact_type, sigtype, platform) self.assertEqual(type(SignArtifactsExistingArtifactFile), type(klass.__class__)) def test_signer_class(self) -> None: @@ -38,17 +39,20 @@ def 
test_signer_class(self) -> None: Path(r"/dummy/path/artifact.tar.gz")), SignArtifactsExistingArtifactFile) - def test_sign_with_build_manifest(self) -> None: + @patch("sign_workflow.signer.GitRepository") + def test_sign_with_build_manifest(self, mock_repo: Mock) -> None: manifest = Path(os.path.join(os.path.dirname(__file__), "data", "opensearch-build-1.1.0.yml")) sigtype = '.asc' - signer = MagicMock() + platform = 'windows' signer_with_manifest = SignWithBuildManifest( target=manifest, components=[], artifact_type="maven", signature_type=sigtype, - signer=signer + platform=platform ) + signer = MagicMock() + signer_with_manifest.signer = signer signer_with_manifest.sign() expected = [ 'maven/org/opensearch/opensearch-performance-analyzer/maven-metadata-local.xml', @@ -60,35 +64,42 @@ def test_sign_with_build_manifest(self) -> None: ] signer.sign_artifacts.assert_called_with(expected, manifest.parent, sigtype) - def test_sign_existing_artifacts_file(self) -> None: + @patch("sign_workflow.signer.GitRepository") + def test_sign_existing_artifacts_file(self, mock_repo: Mock) -> None: path = Path(r"/dummy/path/file.tar.gz") sigtype = '.sig' - signer = MagicMock() + platform = 'linux' signer_with_manifest = SignArtifactsExistingArtifactFile( target=path, components=['maven'], artifact_type='dummy', signature_type=sigtype, - signer=signer + platform=platform ) + signer = MagicMock() + signer_with_manifest.signer = signer signer_with_manifest.sign() - signer.sign_artifact.assert_called_with("file.tar.gz", path.parent, sigtype) + expected = 'file.tar.gz' + signer.sign_artifact.assert_called_with(expected, path.parent, sigtype) + @patch("sign_workflow.signer.GitRepository") @patch('os.walk') - def test_sign_existing_artifacts_folder(self, mock_os_walk: Mock) -> None: + def test_sign_existing_artifacts_folder(self, mock_os_walk: Mock, mock_repo: Mock) -> None: mock_os_walk.return_value = [ ('dummy', (), ['tar_dummy_artifact_1.0.0.tar.gz', 'zip_dummy_artifact_1.1.0.zip']) ] path = Path('dummy') sigtype = '.sig' - signer = MagicMock() + platform = 'linux' signer_with_manifest = SignExistingArtifactsDir( target=path, components=['maven'], artifact_type='dummy', signature_type=sigtype, - signer=signer + platform=platform ) + signer = MagicMock() + signer_with_manifest.signer = signer signer_with_manifest.sign() expected = ["tar_dummy_artifact_1.0.0.tar.gz", "zip_dummy_artifact_1.1.0.zip"] signer.sign_artifacts.assert_called_with(expected, path, sigtype) diff --git a/tests/tests_sign_workflow/test_signer.py b/tests/tests_sign_workflow/test_signer.py index 51d4b5a8b2..00ad5ad4c2 100644 --- a/tests/tests_sign_workflow/test_signer.py +++ b/tests/tests_sign_workflow/test_signer.py @@ -7,128 +7,46 @@ class TestSigner(unittest.TestCase): - @patch("sign_workflow.signer.GitRepository") - def test_accepted_file_types_asc(self, git_repo: Mock) -> None: - artifacts = [ - "bad-xml.xml", - "the-jar.jar", - "the-zip.zip", - "the-whl.whl", - "the-rpm.rpm", - "the-war.war", - "the-pom.pom", - "the-module.module", - "the-tar.tar.gz", - "random-file.txt", - "something-1.0.0.0.jar", - ] - expected = [ - call(os.path.join("path", "the-jar.jar"), ".asc"), - call(os.path.join("path", "the-zip.zip"), ".asc"), - call(os.path.join("path", "the-whl.whl"), ".asc"), - call(os.path.join("path", "the-rpm.rpm"), ".asc"), - call(os.path.join("path", "the-war.war"), ".asc"), - call(os.path.join("path", "the-pom.pom"), ".asc"), - call(os.path.join("path", "the-module.module"), ".asc"), - call(os.path.join("path", "the-tar.tar.gz"), 
".asc"), - call(os.path.join("path", "something-1.0.0.0.jar"), ".asc"), - ] - signer = Signer() - signer.sign = MagicMock() # type: ignore - signer.sign_artifacts(artifacts, Path("path"), ".asc") - self.assertEqual(signer.sign.call_args_list, expected) + class DummySigner(Signer): + def generate_signature_and_verify(self, artifact: str, basepath: Path, signature_type: str) -> None: + pass - @patch("sign_workflow.signer.GitRepository") - def test_accepted_file_types_sig(self, git_repo: Mock) -> None: - artifacts = [ - "bad-xml.xml", - "the-jar.jar", - "the-zip.zip", - "the-whl.whl", - "the-rpm.rpm", - "the-war.war", - "the-pom.pom", - "the-module.module", - "the-tar.tar.gz", - "random-file.txt", - "something-1.0.0.0.jar", - "opensearch_sql_cli-1.0.0-py3-none-any.whl", - "cratefile.crate" - ] - expected = [ - call(os.path.join("path", "the-jar.jar"), ".sig"), - call(os.path.join("path", "the-zip.zip"), ".sig"), - call(os.path.join("path", "the-whl.whl"), ".sig"), - call(os.path.join("path", "the-rpm.rpm"), ".sig"), - call(os.path.join("path", "the-war.war"), ".sig"), - call(os.path.join("path", "the-pom.pom"), ".sig"), - call(os.path.join("path", "the-module.module"), ".sig"), - call(os.path.join("path", "the-tar.tar.gz"), ".sig"), - call(os.path.join("path", "something-1.0.0.0.jar"), ".sig"), - call(os.path.join("path", "opensearch_sql_cli-1.0.0-py3-none-any.whl"), ".sig"), - call(os.path.join("path", "cratefile.crate"), ".sig") - ] - signer = Signer() - signer.sign = MagicMock() # type: ignore - signer.sign_artifacts(artifacts, Path("path"), ".sig") - self.assertEqual(signer.sign.call_args_list, expected) + def is_valid_file_type(self, file_name: str) -> bool: + return file_name.endswith('zip') + + def sign(self, artifact: str, basepath: Path, signature_type: str) -> None: + pass @patch("sign_workflow.signer.GitRepository") def test_signer_checks_out_tool(self, mock_repo: Mock) -> None: - Signer() + self.DummySigner() self.assertEqual(mock_repo.return_value.execute.call_count, 2) mock_repo.return_value.execute.assert_has_calls([call("./bootstrap"), call("rm config.cfg")]) - @patch("sign_workflow.signer.GitRepository") - def test_signer_verify_asc(self, mock_repo: Mock) -> None: - signer = Signer() - signer.verify("/path/the-jar.jar.asc") - mock_repo.assert_has_calls([call().execute("gpg --verify-files /path/the-jar.jar.asc")]) - - @patch("sign_workflow.signer.GitRepository") - def test_signer_verify_sig(self, mock_repo: Mock) -> None: - signer = Signer() - signer.verify("/path/the-jar.jar.sig") - mock_repo.assert_has_calls([call().execute("gpg --verify-files /path/the-jar.jar.sig")]) - - @patch("sign_workflow.signer.GitRepository") - def test_signer_sign_asc(self, mock_repo: Mock) -> None: - signer = Signer() - signer.sign("/path/the-jar.jar", ".asc") - mock_repo.assert_has_calls( - [call().execute("./opensearch-signer-client -i /path/the-jar.jar -o /path/the-jar.jar.asc -p pgp")]) - - @patch("sign_workflow.signer.GitRepository") - def test_signer_sign_sig(self, mock_repo: Mock) -> None: - signer = Signer() - signer.sign("/path/the-jar.jar", ".sig") - mock_repo.assert_has_calls( - [call().execute("./opensearch-signer-client -i /path/the-jar.jar -o /path/the-jar.jar.sig -p pgp")]) - @patch("sign_workflow.signer.GitRepository") def test_sign_artifact_not_called(self, mock_repo: Mock) -> None: - signer = Signer() + signer = self.DummySigner() signer.generate_signature_and_verify = MagicMock() # type: ignore signer.sign_artifact("the-jar.notvalid", Path("/path"), ".sig") 
signer.generate_signature_and_verify.assert_not_called() @patch("sign_workflow.signer.GitRepository") def test_sign_artifact_called(self, mock_repo: Mock) -> None: - signer = Signer() + signer = self.DummySigner() signer.generate_signature_and_verify = MagicMock() # type: ignore signer.sign_artifact("the-jar.zip", Path("/path"), ".sig") signer.generate_signature_and_verify.assert_called_with("the-jar.zip", Path("/path"), ".sig") @patch("sign_workflow.signer.GitRepository") def test_remove_existing_signature_found(self, mock_repo: Mock) -> None: - signer = Signer() + signer = self.DummySigner() os.remove = MagicMock() - signer.sign("tests/tests_sign_workflow/data/signature/tar_dummy_artifact_1.0.0.tar.gz", ".sig") + signer.__remove_existing_signature__("tests/tests_sign_workflow/data/signature/tar_dummy_artifact_1.0.0.tar.gz.sig") os.remove.assert_called_with("tests/tests_sign_workflow/data/signature/tar_dummy_artifact_1.0.0.tar.gz.sig") @patch("sign_workflow.signer.GitRepository") def test_remove_existing_signature_not_found(self, mock_repo: Mock) -> None: - signer = Signer() + signer = self.DummySigner() os.remove = MagicMock() - signer.sign("tests/tests_sign_workflow/data/signature/not_found.tar.gz", ".sig") + signer.__remove_existing_signature__("tests/tests_sign_workflow/data/signature/not_found.tar.gz.sig") os.remove.assert_not_called() diff --git a/tests/tests_sign_workflow/test_signer_pgp.py b/tests/tests_sign_workflow/test_signer_pgp.py new file mode 100644 index 0000000000..08153fa114 --- /dev/null +++ b/tests/tests_sign_workflow/test_signer_pgp.py @@ -0,0 +1,104 @@ +import os +import unittest +from pathlib import Path +from unittest.mock import MagicMock, Mock, call, patch + +from sign_workflow.signer_pgp import SignerPGP + + +class TestSignerPGP(unittest.TestCase): + @patch("sign_workflow.signer.GitRepository") + def test_accepted_file_types_asc(self, git_repo: Mock) -> None: + artifacts = [ + "bad-xml.xml", + "the-jar.jar", + "the-zip.zip", + "the-whl.whl", + "the-rpm.rpm", + "the-war.war", + "the-pom.pom", + "the-module.module", + "the-tar.tar.gz", + "random-file.txt", + "something-1.0.0.0.jar", + ] + expected = [ + call("the-jar.jar", Path("path"), ".asc"), + call("the-zip.zip", Path("path"), ".asc"), + call("the-whl.whl", Path("path"), ".asc"), + call("the-rpm.rpm", Path("path"), ".asc"), + call("the-war.war", Path("path"), ".asc"), + call("the-pom.pom", Path("path"), ".asc"), + call("the-module.module", Path("path"), ".asc"), + call("the-tar.tar.gz", Path("path"), ".asc"), + call("something-1.0.0.0.jar", Path("path"), ".asc"), + ] + signer = SignerPGP() + signer.sign = MagicMock() # type: ignore + signer.verify = MagicMock() # type: ignore + signer.sign_artifacts(artifacts, Path("path"), ".asc") + self.assertEqual(signer.sign.call_args_list, expected) + + @patch("sign_workflow.signer.GitRepository") + def test_accepted_file_types_sig(self, git_repo: Mock) -> None: + artifacts = [ + "bad-xml.xml", + "the-jar.jar", + "the-zip.zip", + "the-whl.whl", + "the-rpm.rpm", + "the-war.war", + "the-pom.pom", + "the-module.module", + "the-tar.tar.gz", + "random-file.txt", + "something-1.0.0.0.jar", + "opensearch_sql_cli-1.0.0-py3-none-any.whl", + "cratefile.crate" + ] + expected = [ + call("the-jar.jar", Path("path"), ".sig"), + call("the-zip.zip", Path("path"), ".sig"), + call("the-whl.whl", Path("path"), ".sig"), + call("the-rpm.rpm", Path("path"), ".sig"), + call("the-war.war", Path("path"), ".sig"), + call("the-pom.pom", Path("path"), ".sig"), + call("the-module.module", 
Path("path"), ".sig"), + call("the-tar.tar.gz", Path("path"), ".sig"), + call("something-1.0.0.0.jar", Path("path"), ".sig"), + call("opensearch_sql_cli-1.0.0-py3-none-any.whl", Path("path"), ".sig"), + call("cratefile.crate", Path("path"), ".sig") + ] + signer = SignerPGP() + signer.sign = MagicMock() # type: ignore + signer.verify = MagicMock() # type: ignore + signer.sign_artifacts(artifacts, Path("path"), ".sig") + self.assertEqual(signer.sign.call_args_list, expected) + + @patch("sign_workflow.signer.GitRepository") + def test_signer_verify_asc(self, mock_repo: Mock) -> None: + signer = SignerPGP() + signer.verify("/path/the-jar.jar.asc") + mock_repo.assert_has_calls([call().execute("gpg --verify-files /path/the-jar.jar.asc")]) + + @patch("sign_workflow.signer.GitRepository") + def test_signer_verify_sig(self, mock_repo: Mock) -> None: + signer = SignerPGP() + signer.verify("/path/the-jar.jar.sig") + mock_repo.assert_has_calls([call().execute("gpg --verify-files /path/the-jar.jar.sig")]) + + @patch("sign_workflow.signer.GitRepository") + def test_signer_sign_asc(self, mock_repo: Mock) -> None: + signer = SignerPGP() + signer.sign("the-jar.jar", Path("/path/"), ".asc") + command = "./opensearch-signer-client -i " + os.path.join(Path("/path/"), 'the-jar.jar') + " -o " + os.path.join(Path("/path/"), 'the-jar.jar.asc') + " -p pgp" + mock_repo.assert_has_calls( + [call().execute(command)]) + + @patch("sign_workflow.signer.GitRepository") + def test_signer_sign_sig(self, mock_repo: Mock) -> None: + signer = SignerPGP() + signer.sign("the-jar.jar", Path("/path/"), ".sig") + command = "./opensearch-signer-client -i " + os.path.join(Path("/path/"), 'the-jar.jar') + " -o " + os.path.join(Path("/path/"), 'the-jar.jar.sig') + " -p pgp" + mock_repo.assert_has_calls( + [call().execute(command)]) diff --git a/tests/tests_sign_workflow/test_signer_windows.py b/tests/tests_sign_workflow/test_signer_windows.py new file mode 100644 index 0000000000..2a1ea77013 --- /dev/null +++ b/tests/tests_sign_workflow/test_signer_windows.py @@ -0,0 +1,49 @@ +import os +import unittest +from pathlib import Path +from unittest.mock import MagicMock, Mock, call, patch + +from sign_workflow.signer_windows import SignerWindows + + +class TestSignerWindows(unittest.TestCase): + + @patch("sign_workflow.signer.GitRepository") + def test_accepted_file_types(self, git_repo: Mock) -> None: + artifacts = [ + "bad-xml.xml", + "the-msi.msi", + "the-exe.exe", + "the-dll.dll", + "the-sys.sys", + "the-ps1.ps1", + "the-psm1.psm1", + "the-cat.cat", + "the-zip.zip", + "random-file.txt", + "something-1.0.0.0.jar", + ] + expected = [ + call("the-msi.msi", Path("path"), ".asc"), + call("the-exe.exe", Path("path"), ".asc"), + call("the-dll.dll", Path("path"), ".asc"), + call("the-sys.sys", Path("path"), ".asc"), + call("the-ps1.ps1", Path("path"), ".asc"), + call("the-psm1.psm1", Path("path"), ".asc"), + call("the-cat.cat", Path("path"), ".asc"), + call("the-zip.zip", Path("path"), ".asc"), + ] + signer = SignerWindows() + signer.sign = MagicMock() # type: ignore + signer.sign_artifacts(artifacts, Path("path"), ".asc") + self.assertEqual(signer.sign.call_args_list, expected) + + @patch("sign_workflow.signer.GitRepository") + @patch('os.rename') + @patch('os.mkdir') + def test_signer_sign(self, mock_os_mkdir: Mock, mock_os_rename: Mock, mock_repo: Mock) -> None: + signer = SignerWindows() + signer.sign("the-msi.msi", Path("/path/"), ".asc") + command = "./opensearch-signer-client -i " + os.path.join(Path("/path/"), 'the-msi.msi') + " -o " + 
os.path.join(Path("/path/"), 'signed_the-msi.msi') + " -p windows" + mock_repo.assert_has_calls( + [call().execute(command)]) diff --git a/tests/tests_sign_workflow/test_signers.py b/tests/tests_sign_workflow/test_signers.py new file mode 100644 index 0000000000..57b63b88ed --- /dev/null +++ b/tests/tests_sign_workflow/test_signers.py @@ -0,0 +1,30 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. + +import unittest +from unittest.mock import Mock, patch + +from sign_workflow.signer_pgp import SignerPGP +from sign_workflow.signer_windows import SignerWindows +from sign_workflow.signers import Signers + + +class TestSigners(unittest.TestCase): + + @patch("sign_workflow.signer.GitRepository") + def test_signer_PGP(self, mock_repo: Mock) -> None: + signer = Signers.create("linux") + self.assertIs(type(signer), SignerPGP) + + @patch("sign_workflow.signer.GitRepository") + def test_signer_windows(self, mock_repo: Mock) -> None: + signer = Signers.create("windows") + self.assertIs(type(signer), SignerWindows) + + def test_signer_invalid(self) -> None: + with self.assertRaises(ValueError) as ctx: + Signers.create("mac") + self.assertEqual(str(ctx.exception), "Unsupported type of platform for signing: mac") diff --git a/vars/buildUploadManifestSHA.groovy b/vars/buildUploadManifestSHA.groovy index 805a044ccf..148f4016a5 100644 --- a/vars/buildUploadManifestSHA.groovy +++ b/vars/buildUploadManifestSHA.groovy @@ -3,7 +3,12 @@ void call(Map args = [:]) { def sha = getManifestSHA(args) - withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { - s3Upload(bucket: "${ARTIFACT_BUCKET_NAME}", file: sha.lock, path: sha.path) - } -} \ No newline at end of file + withCredentials([ + string(credentialsId: 'jenkins-artifact-bucket-name', variable: 'ARTIFACT_BUCKET_NAME'), + string(credentialsId: 'jenkins-aws-account-public', variable: 'AWS_ACCOUNT_PUBLIC')]) { + withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { + s3Upload(bucket: "${ARTIFACT_BUCKET_NAME}", file: sha.lock, path: sha.path) + } + } + +} diff --git a/vars/createReleaseTag.groovy b/vars/createReleaseTag.groovy index beb7357f2d..b3c5f4a64e 100644 --- a/vars/createReleaseTag.groovy +++ b/vars/createReleaseTag.groovy @@ -1,9 +1,9 @@ def call(Map args = [:]) { def lib = library(identifier: 'jenkins@20211123', retriever: legacySCM(scm)) - def buildManifestObj = lib.jenkins.BuildManifest.new(readYaml(file: args.distManifest)) + def bundleManifestObj = lib.jenkins.BundleManifest.new(readYaml(file: args.distManifest)) - def componentsName = buildManifestObj.getNames() + def componentsName = bundleManifestObj.getNames() def componetsNumber = componentsName.size() def version = args.tagVersion def untaggedRepoList = [] @@ -11,8 +11,8 @@ def call(Map args = [:]) { withCredentials([usernamePassword(credentialsId: "${GITHUB_BOT_TOKEN_NAME}", usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) { for (component in componentsName) { - def commitID = buildManifestObj.getCommitId(component) - def repo = buildManifestObj.getRepo(component) + def commitID = bundleManifestObj.getCommitId(component) + def repo = bundleManifestObj.getRepo(component) def push_url = "https://$GITHUB_TOKEN@" + repo.minus('https://') echo "Tagging $component at $commitID ..." 
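
Note on the new tests/tests_sign_workflow/test_signers.py above: it pins down the expected behaviour of Signers.create, returning SignerPGP for "linux", SignerWindows for "windows", and raising ValueError for anything else. A minimal sketch of a factory that would satisfy those tests; the actual sign_workflow/signers.py implementation is not shown in this diff and may differ:

    from sign_workflow.signer_pgp import SignerPGP
    from sign_workflow.signer_windows import SignerWindows


    class Signers:
        # Platform-to-signer mapping exercised by test_signer_PGP and test_signer_windows.
        TYPES = {
            "linux": SignerPGP,
            "windows": SignerWindows,
        }

        @classmethod
        def create(cls, platform):
            signer_class = cls.TYPES.get(platform)
            if signer_class is None:
                # Error message taken verbatim from test_signer_invalid.
                raise ValueError(f"Unsupported type of platform for signing: {platform}")
            return signer_class()
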
diff --git a/vars/downloadFromS3.groovy b/vars/downloadFromS3.groovy index 6ac31404be..1e0c04ffac 100644 --- a/vars/downloadFromS3.groovy +++ b/vars/downloadFromS3.groovy @@ -1,7 +1,7 @@ -void call(Map args = [:]){ - - withAWS(role: "${ARTIFACT_DOWNLOAD_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { - s3Download(file: args.destPath, bucket: args.bucket, path: args.path, force: args.force) +void call(Map args = [:]) { + withCredentials([string(credentialsId: 'jenkins-aws-account-public', variable: 'AWS_ACCOUNT_PUBLIC')]) { + withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { + s3Download(file: args.destPath, bucket: args.bucket, path: args.path, force: args.force) + } } - } diff --git a/vars/getManifestSHA.groovy b/vars/getManifestSHA.groovy index 23a810fde3..0ff2d7ea22 100644 --- a/vars/getManifestSHA.groovy +++ b/vars/getManifestSHA.groovy @@ -21,11 +21,14 @@ Map call(Map args = [:]) { echo "Manifest SHA path: ${manifestSHAPath}" Boolean manifestSHAExists = false - withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { - if (s3DoesObjectExist(bucket: "${ARTIFACT_BUCKET_NAME}", path: manifestSHAPath)) { - manifestSHAExists = true + withCredentials([string(credentialsId: 'jenkins-aws-account-public', variable: 'AWS_ACCOUNT_PUBLIC'), + string(credentialsId: 'jenkins-artifact-bucket-name', variable: 'ARTIFACT_BUCKET_NAME')]) { + withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { + if (s3DoesObjectExist(bucket: "${ARTIFACT_BUCKET_NAME}", path: manifestSHAPath)) { + manifestSHAExists = true } } + } echo "Manifest SHA exists: ${manifestSHAExists}" diff --git a/vars/promoteArtifacts.groovy b/vars/promoteArtifacts.groovy index ea7af6c858..67f06873a9 100644 --- a/vars/promoteArtifacts.groovy +++ b/vars/promoteArtifacts.groovy @@ -20,90 +20,92 @@ void call(Map args = [:]) { String revision = version + qualifier println("Revision: ${revision}") - List distributionList = ["tar", "rpm"] - - for (distribution in distributionList) { - - // Must use local variable due to groovy for loop and closure scope - // Or the stage will fixed to the last item in return when trigger new stages - // https://web.archive.org/web/20181121065904/http://blog.freeside.co/2013/03/29/groovy-gotcha-for-loops-and-closure-scope/ - def distribution_local = distribution - def artifactPath = "${DISTRIBUTION_JOB_NAME}/${revision}/${DISTRIBUTION_BUILD_NUMBER}/${DISTRIBUTION_PLATFORM}/${DISTRIBUTION_ARCHITECTURE}/${distribution_local}" - def prefixPath = "${WORKSPACE}/artifacts/${distribution_local}" - println("S3 download ${distribution_local} artifacts before creating signatures") - - withAWS(role: "${ARTIFACT_DOWNLOAD_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { - s3Download(bucket: "${ARTIFACT_BUCKET_NAME}", file: "${prefixPath}", path: "${artifactPath}/", force: true) - } - - String build_manifest = "$prefixPath/$artifactPath/builds/$filename/manifest.yml" - def buildManifest = readYaml(file: build_manifest) + List distributionList = ['tar', 'rpm'] + + withCredentials([string(credentialsId: 'jenkins-aws-account-public', variable: 'AWS_ACCOUNT_PUBLIC'), + string(credentialsId: 'jenkins-artifact-bucket-name', variable: 'ARTIFACT_BUCKET_NAME'), + string(credentialsId: 
'jenkins-artifact-promotion-role', variable: 'ARTIFACT_PROMOTION_ROLE_NAME'), + string(credentialsId: 'jenkins-aws-production-account', variable: 'AWS_ACCOUNT_ARTIFACT'), + string(credentialsId: 'jenkins-artifact-production-bucket-name', variable: 'ARTIFACT_PRODUCTION_BUCKET_NAME')]) { + for (distribution in distributionList) { + // Must use local variable due to groovy for loop and closure scope + // Or the stage will fixed to the last item in return when trigger new stages + // https://web.archive.org/web/20181121065904/http://blog.freeside.co/2013/03/29/groovy-gotcha-for-loops-and-closure-scope/ + def distribution_local = distribution + def artifactPath = "${DISTRIBUTION_JOB_NAME}/${revision}/${DISTRIBUTION_BUILD_NUMBER}/${DISTRIBUTION_PLATFORM}/${DISTRIBUTION_ARCHITECTURE}/${distribution_local}" + def prefixPath = "${WORKSPACE}/artifacts/${distribution_local}" + println("S3 download ${distribution_local} artifacts before creating signatures") + + withAWS(role: 'opensearch-bundle', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') { + s3Download(bucket: "${ARTIFACT_BUCKET_NAME}", file: "${prefixPath}", path: "${artifactPath}/", force: true) + } - print("Actions ${fileActions}") + String build_manifest = "$prefixPath/$artifactPath/builds/$filename/manifest.yml" + def buildManifest = readYaml(file: build_manifest) - argsMap = [:] - argsMap['sigtype'] = '.sig' + argsMap = [:] + argsMap['sigtype'] = '.sig' - String corePluginDir = "$prefixPath/$artifactPath/builds/$filename/core-plugins" - boolean corePluginDirExists = fileExists(corePluginDir) + String corePluginDir = "$prefixPath/$artifactPath/builds/$filename/core-plugins" + boolean corePluginDirExists = fileExists(corePluginDir) - //////////// Signing Artifacts - println("Signing Starts") + //////////// Signing Artifacts + println('Signing Starts') - if(corePluginDirExists && distribution_local.equals('tar')) { - println("Signing Core Plugins") - argsMap['artifactPath'] = corePluginDir - for (Closure action : fileActions) { - action(argsMap) + if (corePluginDirExists && distribution_local.equals('tar')) { + println('Signing Core Plugins') + argsMap['artifactPath'] = corePluginDir + for (Closure action : fileActions) { + action(argsMap) + } } - } - println("Signing Core/Bundle Artifacts") - String coreFullPath = ['core', filename, revision].join('/') - String bundleFullPath = ['bundle', filename, revision].join('/') - for (Closure action : fileActions) { - for (file in findFiles(glob: "**/${filename}-min-${revision}*.${distribution_local}*,**/${filename}-${revision}*.${distribution_local}*")) { - argsMap['artifactPath'] = "$WORKSPACE" + "/" + file.getPath() - action(argsMap) + println('Signing Core/Bundle Artifacts') + String coreFullPath = ['core', filename, revision].join('/') + String bundleFullPath = ['bundle', filename, revision].join('/') + for (Closure action : fileActions) { + for (file in findFiles(glob: "**/${filename}-min-${revision}*.${distribution_local}*,**/${filename}-${revision}*.${distribution_local}*")) { + argsMap['artifactPath'] = "$WORKSPACE" + '/' + file.getPath() + action(argsMap) + } } - } - //////////// Uploading Artifacts - withAWS(role: "${ARTIFACT_PROMOTION_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_ARTIFACT}", duration: 900, roleSessionName: 'jenkins-session') { - // Core Plugins only needs to be published once through Tar, ignore other distributions - if(corePluginDirExists && distribution_local.equals('tar')) { - List corePluginList = 
buildManifest.components.artifacts."core-plugins"[0] - for (String pluginSubPath : corePluginList) { - String pluginSubFolder = pluginSubPath.split('/')[0] - String pluginNameWithExt = pluginSubPath.split('/')[1] - String pluginName = pluginNameWithExt.replace('-' + revision + '.zip', '') - String pluginNameNoExt = pluginNameWithExt.replace('-' + revision, '') - String pluginFullPath = ['plugins', pluginName, revision].join('/') - s3Upload( + //////////// Uploading Artifacts + withAWS(role: "${ARTIFACT_PROMOTION_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_ARTIFACT}", duration: 900, roleSessionName: 'jenkins-session') { + // Core Plugins only needs to be published once through Tar, ignore other distributions + if (corePluginDirExists && distribution_local.equals('tar')) { + List corePluginList = buildManifest.components.artifacts.'core-plugins'[0] + for (String pluginSubPath : corePluginList) { + String pluginSubFolder = pluginSubPath.split('/')[0] + String pluginNameWithExt = pluginSubPath.split('/')[1] + String pluginName = pluginNameWithExt.replace('-' + revision + '.zip', '') + String pluginNameNoExt = pluginNameWithExt.replace('-' + revision, '') + String pluginFullPath = ['plugins', pluginName, revision].join('/') + s3Upload( bucket: "${ARTIFACT_PRODUCTION_BUCKET_NAME}", path: "releases/$pluginFullPath/", workingDir: "$prefixPath/$artifactPath/builds/$filename/core-plugins/", includePathPattern: "**/${pluginName}*" ) + } } - } - - // We will only publish min artifacts for Tar, ignore other distributions - if (distribution_local.equals('tar')) { - s3Upload( + + // We will only publish min artifacts for Tar, ignore other distributions + if (distribution_local.equals('tar')) { + s3Upload( bucket: "${ARTIFACT_PRODUCTION_BUCKET_NAME}", path: "releases/$coreFullPath/", workingDir: "$prefixPath/$artifactPath/builds/$filename/dist/", includePathPattern: "**/${filename}-min-${revision}-${DISTRIBUTION_PLATFORM}-${DISTRIBUTION_ARCHITECTURE}*") - } + } - // We will publish bundle artifacts for all distributions - s3Upload( + // We will publish bundle artifacts for all distributions + s3Upload( bucket: "${ARTIFACT_PRODUCTION_BUCKET_NAME}", path: "releases/$bundleFullPath/", workingDir: "$prefixPath/$artifactPath/dist/$filename/", includePathPattern: "**/${filename}-${revision}-${DISTRIBUTION_PLATFORM}-${DISTRIBUTION_ARCHITECTURE}*") - + } + } } - } } diff --git a/vars/promoteContainer.groovy b/vars/promoteContainer.groovy new file mode 100644 index 0000000000..2d3b8aa62d --- /dev/null +++ b/vars/promoteContainer.groovy @@ -0,0 +1,102 @@ +/**@ + * Promote image from staging docker to production docker hub or ECR repository. + * + * @param args A map of the following parameters + * @param args.imageRepository The repository of staging image. E.g.: opensearch:2.0.1.3910, opensearch-dashboards:2.0.1, data-prepper:2.0.1-1234 + * @param args.version The official version for release. E.g.: 2.0.1 + * @param args.dockerPromote The boolean argument if promote containers from staging to production docker repo. + * @param args.ecrPromote The boolean argument if promote containers from staging to production ECR repo. + * @param args.latestTag The boolean argument if promote containers from staging to production with latest tag. + * @param args.majorVersionTag The boolean argument if promote containers from staging to production with its major version tag. 
+ */ +void call(Map args = [:]) { + + def imageRepo = args.imageRepository + def version = args.version + def imageProduct = imageRepo.split(':').first() + def sourceTag = imageRepo.split(':').last() + def dockerPromote = args.dockerPromote + def ecrPromote = args.ecrPromote + def latestBoolean = args.latestTag + def majorVersionBoolean = args.majorVersionTag + def majorVersion = version.split("\\.").first() + + def sourceReg = (imageProduct == 'data-prepper') ? "${DATA_PREPPER_STAGING_CONTAINER_REPOSITORY}" : "opensearchstaging" + def dockerProduction = "opensearchproject" + def ecrProduction = "public.ecr.aws/opensearchproject" + + //Promoting docker images + if (dockerPromote.toBoolean()) { + println("Promoting $imageProduct to production docker hub with with $version tag.") + dockerCopy: { + build job: 'docker-copy', + parameters: [ + string(name: 'SOURCE_IMAGE_REGISTRY', value: sourceReg), + string(name: 'SOURCE_IMAGE', value: "${imageProduct}:${sourceTag}"), + string(name: 'DESTINATION_IMAGE_REGISTRY', value: dockerProduction), + string(name: 'DESTINATION_IMAGE', value: "${imageProduct}:${version}") + ] + } + if (majorVersionBoolean.toBoolean()) { + println("Promoting to production docker hub with with $majorVersion tag.") + dockerCopy: { + build job: 'docker-copy', + parameters: [ + string(name: 'SOURCE_IMAGE_REGISTRY', value: sourceReg), + string(name: 'SOURCE_IMAGE', value: "${imageProduct}:${sourceTag}"), + string(name: 'DESTINATION_IMAGE_REGISTRY', value: dockerProduction), + string(name: 'DESTINATION_IMAGE', value: "${imageProduct}:${majorVersion}") + ] + } + } + if (latestBoolean.toBoolean()) { + println("Promoting to production docker hub with with latest tag.") + dockerCopy: { + build job: 'docker-copy', + parameters: [ + string(name: 'SOURCE_IMAGE_REGISTRY', value: sourceReg), + string(name: 'SOURCE_IMAGE', value: "${imageProduct}:${sourceTag}"), + string(name: 'DESTINATION_IMAGE_REGISTRY', value: dockerProduction), + string(name: 'DESTINATION_IMAGE', value: "${imageProduct}:latest") + ] + } + } + } + //Promoting image to ECR + if (ecrPromote.toBoolean()) { + println("Promoting to production ECR with with $version tag.") + dockerCopy: { + build job: 'docker-copy', + parameters: [ + string(name: 'SOURCE_IMAGE_REGISTRY', value: sourceReg), + string(name: 'SOURCE_IMAGE', value: "${imageProduct}:${sourceTag}"), + string(name: 'DESTINATION_IMAGE_REGISTRY', value: ecrProduction), + string(name: 'DESTINATION_IMAGE', value: "${imageProduct}:${version}") + ] + } + if (majorVersionBoolean.toBoolean()) { + println("Promoting to production ECR with with $majorVersion tag.") + dockerCopy: { + build job: 'docker-copy', + parameters: [ + string(name: 'SOURCE_IMAGE_REGISTRY', value: sourceReg), + string(name: 'SOURCE_IMAGE', value: "${imageProduct}:${sourceTag}"), + string(name: 'DESTINATION_IMAGE_REGISTRY', value: ecrProduction), + string(name: 'DESTINATION_IMAGE', value: "${imageProduct}:${majorVersion}") + ] + } + } + if (latestBoolean.toBoolean()) { + println("Promoting to production ECR with with latest tag.") + dockerCopy: { + build job: 'docker-copy', + parameters: [ + string(name: 'SOURCE_IMAGE_REGISTRY', value: sourceReg), + string(name: 'SOURCE_IMAGE', value: "${imageProduct}:${sourceTag}"), + string(name: 'DESTINATION_IMAGE_REGISTRY', value: ecrProduction), + string(name: 'DESTINATION_IMAGE', value: "${imageProduct}:latest") + ] + } + } + } +} diff --git a/vars/promoteYumRepos.groovy b/vars/promoteYumRepos.groovy index a18be4835e..f1e1614b8c 100644 --- 
a/vars/promoteYumRepos.groovy +++ b/vars/promoteYumRepos.groovy @@ -17,7 +17,7 @@ void call(Map args = [:]) { String buildnumber = args.buildNumber ?: 'none' if (buildnumber == 'none') { - println("User did not enter build number in jenkins parameter, exit 1") + println('User did not enter build number in jenkins parameter, exit 1') System.exit(1) } @@ -40,72 +40,73 @@ void call(Map args = [:]) { String yumRepoProdPath = "releases/bundle/${filename}/${yumRepoVersion}/yum" String artifactPath = "${localPath}/${yumRepoProdPath}" + withCredentials([string(credentialsId: 'jenkins-artifact-promotion-role', variable: 'ARTIFACT_PROMOTION_ROLE_NAME'), + string(credentialsId: 'jenkins-aws-production-account', variable: 'AWS_ACCOUNT_ARTIFACT'), + string(credentialsId: 'jenkins-artifact-production-bucket-name', variable: 'ARTIFACT_PRODUCTION_BUCKET_NAME')]) { + withAWS(role: "${ARTIFACT_PROMOTION_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_ARTIFACT}", duration: 900, roleSessionName: 'jenkins-session') { + println('Pulling Prod Yumrepo') + sh("aws s3 sync s3://${ARTIFACT_PRODUCTION_BUCKET_NAME}/${yumRepoProdPath}/ ${artifactPath}/ --no-progress") + } - withAWS(role: "${ARTIFACT_PROMOTION_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_ARTIFACT}", duration: 900, roleSessionName: 'jenkins-session') { - println("Pulling Prod Yumrepo") - sh("aws s3 sync s3://${ARTIFACT_PRODUCTION_BUCKET_NAME}/${yumRepoProdPath}/ ${artifactPath}/ --no-progress") - } - - sh """ - set -e - set +x - set +x + sh """ + set -e + set +x + set +x - echo "Pulling ${revision} rpms" - cd ${artifactPath} - curl -SLO ${stagingYumPathX64} - curl -SLO ${stagingYumPathARM64} + echo "Pulling ${revision} rpms" + cd ${artifactPath} + curl -SLO ${stagingYumPathX64} + curl -SLO ${stagingYumPathARM64} - ls -l + ls -l - rm -vf repodata/repomd.xml.asc + rm -vf repodata/repomd.xml.asc - echo "Update repo metadata" - createrepo --update . + echo "Update repo metadata" + createrepo --update . 
-            # Rename .xml to .pom for signing
-            # Please do not add .xml to signer filter
-            # As maven have many .xml and we do not want to sign them
-            # This is an outlier case for yum repo only
-            mv -v repodata/repomd.xml repodata/repomd.pom
+                # Rename .xml to .pom for signing
+                # Please do not add .xml to signer filter
+                # As maven have many .xml and we do not want to sign them
+                # This is an outlier case for yum repo only
+                mv -v repodata/repomd.xml repodata/repomd.pom
-            echo "Complete metadata update, awaiting signing repomd.xml"
+                echo "Complete metadata update, awaiting signing repomd.xml"
             cd -
         """
-        signArtifacts(
-            artifactPath: "${artifactPath}/repodata/repomd.pom",
-            sigtype: '.sig',
-            platform: 'linux'
-        )
+            signArtifacts(
+                artifactPath: "${artifactPath}/repodata/repomd.pom",
+                sigtype: '.sig',
+                platform: 'linux'
+            )
-        sh """
-            set -e
-            set +x
+            sh """
+                set -e
+                set +x
-            cd ${artifactPath}/repodata/
+                cd ${artifactPath}/repodata/
-            ls -l
+                ls -l
-            mv -v repomd.pom repomd.xml
-            mv -v repomd.pom.sig repomd.xml.sig
+                mv -v repomd.pom repomd.xml
+                mv -v repomd.pom.sig repomd.xml.sig
-            # This step is required as yum only accept .asc and signing workflow only support .sig
-            cat repomd.xml.sig | gpg --enarmor | sed 's@ARMORED FILE@SIGNATURE@g' > repomd.xml.asc
+                # This step is required as yum only accept .asc and signing workflow only support .sig
+                cat repomd.xml.sig | gpg --enarmor | sed 's@ARMORED FILE@SIGNATURE@g' > repomd.xml.asc
-            rm -vf repomd.xml.sig
+                rm -vf repomd.xml.sig
-            ls -l
-
-            cd -
+                ls -l
+                cd -
         """
-        withAWS(role: "${ARTIFACT_PROMOTION_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_ARTIFACT}", duration: 900, roleSessionName: 'jenkins-session') {
-            println("Pushing Prod Yumrepo")
-            sh("aws s3 sync ${artifactPath}/ s3://${ARTIFACT_PRODUCTION_BUCKET_NAME}/${yumRepoProdPath}/ --no-progress")
-        }
-
+            withAWS(role: "${ARTIFACT_PROMOTION_ROLE_NAME}", roleAccount: "${AWS_ACCOUNT_ARTIFACT}", duration: 900, roleSessionName: 'jenkins-session') {
+                println('Pushing Prod Yumrepo')
+                sh("aws s3 sync ${artifactPath}/ s3://${ARTIFACT_PRODUCTION_BUCKET_NAME}/${yumRepoProdPath}/ --no-progress")
+            }
+        }
 }
diff --git a/vars/runGradleCheck.groovy b/vars/runGradleCheck.groovy
index ae52a02c62..3705fbebd7 100644
--- a/vars/runGradleCheck.groovy
+++ b/vars/runGradleCheck.groovy
@@ -16,12 +16,11 @@ void call(Map args = [:]) {
         usernamePassword(credentialsId: "jenkins-gradle-check-s3-aws-resources", usernameVariable: 'amazon_s3_base_path', passwordVariable: 'amazon_s3_bucket')]) {
             sh """
+                #!/bin/bash
                 set -e
                 set +x
-                env | grep JAVA | grep HOME
-
                 echo "Git clone: ${git_repo_url} with ref: ${git_reference}"
                 rm -rf search
                 git clone ${git_repo_url} search
@@ -29,9 +28,24 @@ void call(Map args = [:]) {
                 git checkout -f ${git_reference}
                 git rev-parse HEAD
-                echo "Stop existing gradledaemon"
+                echo "Get Major Version"
+                OS_VERSION=`cat buildSrc/version.properties | grep opensearch | cut -d= -f2 | grep -oE '[0-9.]+'`
+                OS_MAJOR_VERSION=`echo \$OS_VERSION | grep -oE '[0-9]+' | head -n 1`
+                echo "Version: \$OS_VERSION, Major Version: \$OS_MAJOR_VERSION"
+
+                if [ "\$OS_MAJOR_VERSION" -lt 2 ]; then
+                    echo "Using JAVA 11"
+                    export JAVA_HOME=\$JAVA11_HOME
+                else
+                    echo "Using JAVA 17"
+                    export JAVA_HOME=\$JAVA17_HOME
+                fi
+
+                env | grep JAVA | grep HOME
+
+                echo "Gradle clean cache and stop existing gradledaemon"
                 ./gradlew --stop
-                find ~/.gradle -type f -name "*.lock" -delete
+                rm -rf ~/.gradle
                 echo "Check existing dockercontainer"
                 docker ps -a
@@ -43,9 +57,15 @@ void call(Map args = [:]) {
                 echo "Check docker-compose version"
                 docker-compose version
+                echo "Check existing processes"
+                ps -ef | grep [o]pensearch | wc -l
+                echo "Cleanup existing processes"
+                kill -9 `ps -ef | grep [o]pensearch | awk '{print \$2}'` > /dev/null 2>&1 || echo
+                ps -ef | grep [o]pensearch | wc -l
+
                 echo "Start gradlecheck"
                 GRADLE_CHECK_STATUS=0
-                ./gradlew check -Dtests.coverage=true --no-daemon --no-scan || GRADLE_CHECK_STATUS=1
+                ./gradlew clean && ./gradlew check -Dtests.coverage=true --no-daemon --no-scan || GRADLE_CHECK_STATUS=1
                 if [ "\$GRADLE_CHECK_STATUS" != 0 ]; then
                     echo Gradle Check Failed!
diff --git a/vars/runPerfTestScript.groovy b/vars/runPerfTestScript.groovy
index 550ad42c6e..6f31fa537f 100644
--- a/vars/runPerfTestScript.groovy
+++ b/vars/runPerfTestScript.groovy
@@ -4,9 +4,12 @@ void call(Map args = [:]) {
     install_opensearch_infra_dependencies()
     config_name = isNullOrEmpty(args.config) ? "config.yml" : args.config
-    withAWS(role: 'opensearch-test', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') {
-        s3Download(file: "config.yml", bucket: "${ARTIFACT_BUCKET_NAME}", path: "${PERF_TEST_CONFIG_LOCATION}/${config_name}", force: true)
-    }
+    withCredentials([string(credentialsId: 'jenkins-aws-account-public', variable: 'AWS_ACCOUNT_PUBLIC'),
+        string(credentialsId: 'jenkins-artifact-bucket-name', variable: 'ARTIFACT_BUCKET_NAME')]) {
+        withAWS(role: 'opensearch-test', roleAccount: "${AWS_ACCOUNT_PUBLIC}", duration: 900, roleSessionName: 'jenkins-session') {
+            s3Download(file: "config.yml", bucket: "${ARTIFACT_BUCKET_NAME}", path: "${PERF_TEST_CONFIG_LOCATION}/${config_name}", force: true)
+        }
+    }
     String stackNameSuffix = isNullOrEmpty(args.stackNameSuffix) ? 'perf-test' : args.stackNameSuffix
diff --git a/vars/signArtifacts.groovy b/vars/signArtifacts.groovy
index 491ddac51d..440b746ee8 100644
--- a/vars/signArtifacts.groovy
+++ b/vars/signArtifacts.groovy
@@ -15,15 +15,17 @@ SignArtifacts signs the given artifacts and saves the signature in the same dire
 @param Map[platform] - The distribution platform for signing.
 */
 void call(Map args = [:]) {
-    if (args.sigtype.equals('.rpm')) {
-        echo "RPM Add Sign"
+        withCredentials([string(credentialsId: 'jenkins-rpm-signing-props', variable: 'configs')]) {
+            def props = readJSON(text: configs)
+            def signingAccount = props['account']
+            def signingPassphraseSecretsArn = props['passphrase_secrets_arn']
+            def signingSecretKeyIdSecretsArn = props['secret_key_id_secrets_arn']
+            def signingKeyId = props['key_id']
+
+            echo 'RPM Add Sign'
-        withAWS(role: "${SIGN_ASM_ROLE}", roleAccount: "${SIGN_ASM_ACCOUNT}", duration: 900, roleSessionName: 'jenkins-signing-session') {
-            withCredentials([
-            string(credentialsId: 'jenkins-rpm-signing-asm-pass-id', variable: 'SIGNING_PASS_ID'),
-            string(credentialsId: 'jenkins-rpm-signing-asm-secret-id', variable: 'SIGNING_SECRET_ID')])
-            {
+            withAWS(role: 'jenki-jenki-asm-assume-role', roleAccount: "${signingAccount}", duration: 900, roleSessionName: 'jenkins-signing-session') {
                 sh """
                     set -e
                     set +x
@@ -61,8 +63,8 @@ void call(Map args = [:]) {
                     echo "------------------------------------------------------------------------"
                     echo "Import OpenSearch keys"
-                    aws secretsmanager get-secret-value --region "${SIGN_ASM_REGION}" --secret-id "${SIGNING_PASS_ID}" | jq -r .SecretBinary | base64 --decode > passphrase
-                    aws secretsmanager get-secret-value --region "${SIGN_ASM_REGION}" --secret-id "${SIGNING_SECRET_ID}" | jq -r .SecretBinary | base64 --decode | gpg --quiet --import --pinentry-mode loopback --passphrase-file passphrase -
+                    aws secretsmanager get-secret-value --region us-west-2 --secret-id "${signingPassphraseSecretsArn}" | jq -r .SecretBinary | base64 --decode > passphrase
+                    aws secretsmanager get-secret-value --region us-west-2 --secret-id "${signingSecretKeyIdSecretsArn}" | jq -r .SecretBinary | base64 --decode | gpg --quiet --import --pinentry-mode loopback --passphrase-file passphrase -
                     echo "------------------------------------------------------------------------"
                     echo "Start Signing Rpm"
@@ -89,56 +91,61 @@ void call(Map args = [:]) {
                     echo "------------------------------------------------------------------------"
                     echo "Clean up gpg"
-                    gpg --batch --yes --delete-secret-keys $SIGN_ASM_KEYID
-                    gpg --batch --yes --delete-keys $SIGN_ASM_KEYID
+                    gpg --batch --yes --delete-secret-keys ${signingKeyId}
+                    gpg --batch --yes --delete-keys ${signingKeyId}
                     rm -v passphrase
                 """
-
-            }
+            }
         }
-    }
     else {
-        echo "PGP Signature Signing"
+        echo "PGP or Windows Signature Signing"
-        if( !fileExists("$WORKSPACE/sign.sh")) {
+        if (!fileExists("$WORKSPACE/sign.sh")) {
            git url: 'https://github.com/opensearch-project/opensearch-build.git', branch: 'main'
         }
         importPGPKey()
-
+
         String arguments = generateArguments(args)
         // Sign artifacts
-        withCredentials([usernamePassword(credentialsId: "${GITHUB_BOT_TOKEN_NAME}", usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN')]) {
+        def configSecret = args.platform == "windows" ? "signer-windows-config" : "signer-pgp-config"
+        withCredentials([usernamePassword(credentialsId: "${GITHUB_BOT_TOKEN_NAME}", usernameVariable: 'GITHUB_USER', passwordVariable: 'GITHUB_TOKEN'),
+        string(credentialsId: configSecret, variable: 'configs')]) {
+            def creds = readJSON(text: configs)
+            def ROLE = creds['role']
+            def EXTERNAL_ID = creds['external_id']
+            def UNSIGNED_BUCKET = creds['unsigned_bucket']
+            def SIGNED_BUCKET = creds['signed_bucket']
+            def PROFILE_IDENTIFIER = creds['profile_identifier']
+            def PLATFORM_IDENTIFIER = creds['platform_identifier']
             sh """
-            #!/bin/bash
-            set +x
-            export ROLE=${SIGNER_CLIENT_ROLE}
-            export EXTERNAL_ID=${SIGNER_CLIENT_EXTERNAL_ID}
-            export UNSIGNED_BUCKET=${SIGNER_CLIENT_UNSIGNED_BUCKET}
-            export SIGNED_BUCKET=${SIGNER_CLIENT_SIGNED_BUCKET}
-
-            $WORKSPACE/sign.sh ${arguments}
-            """
+            #!/bin/bash
+            set +x
+            export ROLE=$ROLE
+            export EXTERNAL_ID=$EXTERNAL_ID
+            export UNSIGNED_BUCKET=$UNSIGNED_BUCKET
+            export SIGNED_BUCKET=$SIGNED_BUCKET
+            export PROFILE_IDENTIFIER=$PROFILE_IDENTIFIER
+            export PLATFORM_IDENTIFIER=$PLATFORM_IDENTIFIER
+
+            $WORKSPACE/sign.sh ${arguments}
+            """
         }
-
     }
 }
 String generateArguments(args) {
-    String artifactPath = args.remove("artifactPath")
+    String artifactPath = args.remove('artifactPath')
     // artifactPath is mandatory and the first argument
     String arguments = artifactPath
     // generation command line arguments
-    args.each{key, value -> arguments += " --${key}=${value}"}
+    args.each { key, value -> arguments += " --${key }=${value }"}
     return arguments
 }
-void importPGPKey(){
-
-    sh "curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -"
-
+void importPGPKey() {
+    sh 'curl -sSL https://artifacts.opensearch.org/publickeys/opensearch.pgp | gpg --import -'
 }
-