Open medamineziraoui opened 3 years ago
HOW TO TEST:
The release : 2.1.4
sdkman
https://saagie-workspace.prod.saagie.io/
exportLocation
path that exists in the script.tmp
inside the exportLocation
job
pipeline
inside the exportLocation
build.gradle
saagieprojectid=6d8168ba-0d16-41da-b5cf-964eb9c3581d // <===== Make sure this project exist on the target platform saagieUrl
saagieurl = https://saagie-workspace.prod.saagie.io
saagieuserid = YOUR_USER_NAME
saagiepassword = YOUR_PASSWORD
saagieplatformid=THE_SAAGIE_PLATFORM_ID
Then you need to use this build.
import io.saagie.plugin.dataops.tasks.projects.artifact.ProjectsExportJobV1Task
import io.saagie.plugin.dataops.tasks.projects.artifact.ProjectsImportJobTask
import groovy.json.JsonSlurper;
// Gradle plugins: groovy/base plus the Saagie DataOps plugin that provides the export/import task types.
plugins {
id 'groovy'
id 'base'
id 'io.saagie.gradle-saagie-dataops-plugin' version '2.1.8' // NOTE(review): issue header says release 2.1.4 — confirm intended plugin version
}
// Buildscript dependency on the same Saagie plugin artifact, matching the version in the plugins block.
dependencies {
implementation group: 'gradle.plugin.io.saagie', name: 'gradle-saagie-dataops-plugin', version: '2.1.8'
}
// Filesystem locations shared by every export/import task below.
def exportLocation = '/location/on/your/system' // <=== edit this with the export path
// Fixed: the original value began with a stray leading space (' /location/...'),
// which yields an invalid/unexpected temporary-directory path at runtime.
def tmpLocation = '/location/on/your/system/tmp' // <=== edit this with the tmp file path
// Exports v1 job 22186 (Sqoop) to <exportLocation>/job/sqoop.zip.
task projectExportSqoop(type: ProjectsExportJobV1Task) { // <====== first export task
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportSqoop.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['22186']
}
exportArtifacts {
export_file = exportLocation + '/job/sqoop.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportSqoop'
group = 'Saagie'
description= 'export v1 sqoop job for saagie'
}
// Exports v1 job 4708 (Spark) with every version to <exportLocation>/job/spark.zip.
task projectExportSpark(type: ProjectsExportJobV1Task) { // <====== second export task
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportSpark.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['4708']
include_all_versions=true
}
exportArtifacts {
export_file = exportLocation + '/job/spark.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportSpark'
group = 'Saagie'
description= 'export v1 spark job for saagie'
}
// Exports v1 job 21754 (Java) to <exportLocation>/job/java.zip.
task projectExportJava(type: ProjectsExportJobV1Task) { // <====== third export task (comment fixed: previously said "second job")
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportJava.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['21754']
}
exportArtifacts {
export_file = exportLocation + '/job/java.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportJava'
group = 'Saagie'
description= 'export v1 Java job for saagie'
}
// Exports v1 job 20983 (R) to <exportLocation>/job/r.zip.
task projectExportR(type: ProjectsExportJobV1Task) { // <====== fourth export task (comment fixed: previously said "second job")
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportR.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['20983']
}
exportArtifacts {
export_file = exportLocation + '/job/r.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportR'
group = 'Saagie'
description= 'export v1 R job for saagie'
}
// Exports v1 job 21439 (Talend) to <exportLocation>/job/talend.zip.
task projectExportTalend(type: ProjectsExportJobV1Task) { // <====== fifth export task (comment fixed: previously said "second job")
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportTalend.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['21439']
}
exportArtifacts {
export_file = exportLocation + '/job/talend.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportTalend'
group = 'Saagie'
description= 'export v1 talend job for saagie'
}
// Exports v1 job 22702 (Python) to <exportLocation>/job/python.zip.
task projectExportPython(type: ProjectsExportJobV1Task) { // <====== sixth export task (comment fixed: previously said "second job")
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportPython.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['22702']
}
exportArtifacts {
export_file = exportLocation + '/job/python.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportPython'
group = 'Saagie'
description= 'export v1 python job for saagie'
}
// Exports v1 job 22712 (Docker) to <exportLocation>/job/docker.zip.
task projectExportDocker(type: ProjectsExportJobV1Task) { // <====== seventh export task (comment fixed: previously said "second job")
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportDocker.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
job {
ids = ['22712']
}
exportArtifacts {
export_file = exportLocation + '/job/docker.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportDocker'
group = 'Saagie'
description= 'export v1 Docker job for saagie'
}
// Exports v1 pipeline 484 together with its jobs and all of their versions
// to <exportLocation>/pipeline/pipelinejobver.zip.
task projectExportPipelineWithJobandVersion(type: ProjectsExportJobV1Task) { // <====== pipeline export task (with job dependencies)
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportPipelineWithJobandVersion.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
pipeline {
ids = ['484']
include_job=true
include_all_versions=true
}
exportArtifacts {
export_file = exportLocation + '/pipeline/pipelinejobver.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportPipelineWithJobandVersion'
group = 'Saagie'
description= 'export Pipeline with job and version dependencies' // fixed typo: was "Pipleline"
}
// Exports v1 pipeline 36 alone (no jobs, no extra versions)
// to <exportLocation>/pipeline/pipeline.zip.
task projectExportPipeline(type: ProjectsExportJobV1Task) { // <====== pipeline export task (no dependencies; comment fixed, previously said "second job")
configuration = saagie {}
doFirst {
// Configuration is built at execution time so the saagie* properties are read lazily.
projectExportPipeline.configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
acceptSelfSigned = true
}
pipeline {
ids = ['36']
include_job=false
include_all_versions=false
}
exportArtifacts {
export_file = exportLocation + '/pipeline/pipeline.zip'
overwrite=true
temporary_directory= tmpLocation
}
}
}
taskName = 'projectExportPipeline'
group = 'Saagie'
description= 'export Pipeline without dependencies' // fixed typo: was "Pipleline with out dependency"
}
// Imports the exported Sqoop archive into the target project; dependsOn runs the export first.
task projectImportSqoop( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportSqoop'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportSqoop.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/sqoop.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import sqoop job for saagie'
taskName = 'projectImportSqoop'
}
// Imports the exported Spark archive into the target project; dependsOn runs the export first.
task projectImportSpark( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportSpark'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportSpark.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/spark.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import spark job for saagie' // fixed copy-paste: previously said "sqoop"
taskName = 'projectImportSpark'
}
// Imports the exported Java archive into the target project; dependsOn runs the export first.
task projectImportJava( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportJava'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportJava.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/java.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import java job for saagie'
taskName = 'projectImportJava'
}
// Imports the exported R archive into the target project; dependsOn runs the export first.
task projectImportR( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportR'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportR.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/r.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import R job for saagie'
taskName = 'projectImportR'
}
// Imports the exported Talend archive into the target project; dependsOn runs the export first.
task projectImportTalend( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportTalend'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportTalend.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/talend.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import Talend job for saagie'
taskName = 'projectImportTalend'
}
// Imports the exported Python archive into the target project; dependsOn runs the export first.
task projectImportPython( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportPython'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportPython.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/python.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import Python job for saagie'
taskName = 'projectImportPython'
}
// Imports the exported Docker archive into the target project; dependsOn runs the export first.
task projectImportDocker( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportDocker'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportDocker.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/job/docker.zip'
temporary_directory= tmpLocation
}
}
}
description= 'import Docker job for saagie'
taskName = 'projectImportDocker'
}
// Imports the pipeline archive that includes its jobs and all versions; dependsOn runs the export first.
task projectImportPipelineWithJobandVersion( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportPipelineWithJobandVersion'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportPipelineWithJobandVersion.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/pipeline/pipelinejobver.zip'
temporary_directory= tmpLocation
}
}
}
description= 'Import Pipeline with Dependency Job and all versions'
taskName = 'projectImportPipelineWithJobandVersion'
}
// Imports the pipeline-only archive (exported with include_job=false); dependsOn runs the export first.
task projectImportPipeline( // <======== Import artifact; note the dependsOn on the matching export task
type: ProjectsImportJobTask) {
dependsOn(['projectExportPipeline'])
configuration = saagie {
server {
url = saagieurl
login = saagieuserid
password = saagiepassword
environment = saagieplatformid
jwt = true
}
}
doFirst {
// Project/artifact settings resolved at execution time.
projectImportPipeline.configuration = saagie {
project {
id = saagieprojectid
}
importArtifacts {
import_file = exportLocation + '/pipeline/pipeline.zip'
temporary_directory= tmpLocation
}
}
}
description= 'Import Pipeline without dependencies' // fixed copy-paste: previously claimed "with Dependency Job and all versions"
taskName = 'projectImportPipeline'
}
// Aggregator task: running it triggers every import task (and, transitively, every export task).
task projectTestAll( // <======== Aggregator; comment fixed — this task imports nothing itself, it only depends on the import tasks
type:Exec) {
dependsOn(['projectImportSqoop','projectImportSpark','projectImportJava','projectImportR','projectImportTalend','projectImportPython','projectImportDocker', 'projectImportPipeline', 'projectImportPipelineWithJobandVersion' ])
commandLine 'echo','Testing'
}
Execute this script using this command:
gradle -b build.gradle projectTestAll
Bug behavior:
> Task :projectExportPipeline
{"status":"success","exportfile":"./pipeline/pipeline.zip"}
> Task :projectImportPipeline FAILED
Something went wrong when creating the pipeline: {"errors":[{"message":"Pipeline not valid","extensions":{"pipeline":"not allowed","classification":"ValidationError"}}],"data":null}
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':projectImportPipeline'.
> Something went wrong when creating the pipeline: {"errors":[{"message":"Pipeline not valid","extensions":{"pipeline":"not allowed","classification":"ValidationError"}}],"data":null}
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
BUILD FAILED in 6s
2 actionable tasks: 2 executed
Expected Behavior (use case that works fine) If you got this response:
gradle -b build.tasks.gradle projectTestAll
> Task :projectExportDocker
{"status":"success","exportfile":"/home/amine/Desktop/test_gradle/job/docker.zip"}
> Task :projectImportDocker
{status=success, job=[{id=22712, name=jupyter/scipy-notebook}], pipeline=[]}
> Task :projectExportJava
{"status":"success","exportfile":"/home/amine/Desktop/test_gradle/job/java.zip"}
> Task :projectImportJava
{status=success, job=[{id=21754, name=RetrieveCustomerData}], pipeline=[]}
> Task :projectExportPipeline
{"status":"success","exportfile":"/home/amine/Desktop/test_gradle/pipeline/pipeline.zip"}
> Task :projectImportPipeline FAILED
Some of the jobs contained in the pipeline version doesn't exist in targeted platform.
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':projectImportPipeline'.
> Missing job names not found on the target platform => : [Wine Import, Analyze Wine]
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
BUILD FAILED in 16s
6 actionable tasks: 6 executed
Then everything works fine.
As shown in the gif the build was successful for the tasks script
Bug behavior: When we export a pipeline from v1 without setting
include_jobs
to true, it includes all the jobs in the export process anyway. Fix behavior: When we export a pipeline from v1 without setting
include_jobs
to true, it should export only the pipeline without the jobs. And in the import process, if the jobs from the pipeline version don't exist on the target platform, we need to get an error message showing the list of the missing job names, as shown below.