kirin-ri / memo

0 stars 0 forks source link

330 #19

Open kirin-ri opened 7 months ago

kirin-ri commented 7 months ago
import logging
import os
import secrets
import string
# import time

import requests
from ap.getInitScmDemo import getInitScmDemo
from retry import retry
from tools.copyCatalogAP import copyCatalogAP
from tools.copyDatabricks import copyDatabricks
from tools.copyDPB import copyDPB
from tools.copyGithub import copyMetrics, makeRepository, pushTag
from tools.copyJenkins import copyJenkins
from tools.copySnow import copySnow
from tools.copyArango import copyArango
from tools.copyTiDB import copyTiDB
# from tools.pollingJenkinsJob import pollingJenkinsJob
from tools.snowflakeAccessor import SnowflakeAccessor
from werkzeug.exceptions import Conflict, InternalServerError

# Deployment identifiers injected via environment variables.
PROJECT = os.getenv("PROJECT")  # project name of this deployment itself
ENV = os.getenv("ENV")  # deployment stage suffix used in generated names -- presumably dev/stg/prd; confirm

# Module-level logger initialization.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

class RetryException(Exception):
    """Raised by polling helpers to signal that the attempt should be retried."""

def _conflictCheck(project: str):
    """Return the stored PROJECT value if *project* is already registered.

    Queries PROJECT_META_INFO for a row matching *project* and returns the
    matched PROJECT name, or None when no row exists.

    Raises:
        InternalServerError: when the Snowflake connection/query fails.
    """
    obj = None
    data = None
    try:
        obj = SnowflakeAccessor()
        # NOTE(review): project is interpolated straight into the SQL text;
        # callers must pass trusted values, or this should use bind params.
        query = f"""
        SELECT
            PROJECT
        FROM
            PROJECT_META_INFO
        WHERE
            PROJECT = '{project}'
        LIMIT 1;
        """
        res = obj.execute(query)
        for i in res:
            data = i[0]
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:ConflictCheck Snowflake Connection")
        raise InternalServerError
    finally:
        # Guard: if SnowflakeAccessor() itself raised, obj was never bound
        # and the original code raised NameError here, masking the cause.
        if obj is not None:
            obj.close()

    return data

def _dbAndSchemaExistsCheck(db: str, schema: str, snowAccount: str,
                            snowUser: str, snowPass: str, snowWh: str):
    """Verify that *db*, and *schema* within it, exist on the customer's
    Snowflake account.

    An empty *schema* defaults to "PUBLIC".

    Raises:
        InternalServerError: when the database or schema is missing.
        Exception: re-raised connection/query errors.
    """
    # --- database existence check ---
    obj = None
    try:
        obj = SnowflakeAccessor(account=snowAccount, user=snowUser,
                                password=snowPass, warehouse=snowWh)
        query = f"""
            SHOW DATABASES LIKE '{db}';
        """
        res = obj.execute(query)
        resDb = None
        for i in res:
            resDb = i[0]
        if not resDb:
            logger.info("---error:DBExistsCheck DB is Not Exists")
            raise InternalServerError
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        raise e
    finally:
        # Guard: obj is unbound if the accessor constructor itself failed.
        if obj is not None:
            obj.close()

    # --- schema existence check ---
    obj = None
    try:
        if not schema:
            schema = "PUBLIC"
        obj = SnowflakeAccessor(account=snowAccount, user=snowUser,
                                password=snowPass, warehouse=snowWh,
                                database=db)
        query = f"""
            SELECT
              schema_name
            FROM
              information_schema.schemata
            WHERE
              SCHEMA_NAME = '{schema}';
        """
        res = obj.execute(query)
        resSchema = None
        for i in res:
            resSchema = i[0]
        if not resSchema:
            logger.info("---error:SchemaExistsCheck Schema is Not Exists")
            raise InternalServerError
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        raise e
    finally:
        if obj is not None:
            obj.close()

def _copyPostprocess(project: str, ENV: str, sub_domain: str,
                     snowWh: str, inputDB: str, inputSchema: str, arangoDb: str):
    """Post-deployment steps: record the Catalog sub-domain in the
    per-customer metadata row, then load the initial SCM demo data.

    Raises:
        InternalServerError: when the metadata UPDATE fails.
    """
    # Update the per-customer environment metadata.
    obj = None
    try:
        obj = SnowflakeAccessor()
        query = f"""
        UPDATE
            PROJECT_META_INFO
        SET
            CATALOG_SUB_DOMAIN='{sub_domain}'
        WHERE
            PROJECT='{project}';
        """
        obj.execute(query)
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:Postprocess update metadata")
        raise InternalServerError
    finally:
        # Guard: obj is unbound if the accessor constructor itself failed.
        if obj is not None:
            obj.close()

    # Wait for the Catalog AP to come up.
    # _pollingCatalogAp(sub_domain)

    # Register initial data (runs after the Catalog AP copy).
    # if enableDplApi:
    getInitScmDemo(project, ENV, snowWh, inputDB, inputSchema, arangoDb)

    return

def _insertMetaInfo(
    project: str,
    project_jp: str,
    domain: str,
    snowWh: str,
    cluster_id: str,
    host: str,
    username: str,
    password: str,
    enableDplApi: bool,
    enableCreatingMetrics: bool,
    inputCustomInfo=''
):
    """Insert the per-customer environment record into PROJECT_META_INFO.

    When *inputCustomInfo* is non-empty it is stored in the CUSTOM_INPUT
    variant column; otherwise that column is left NULL.

    Raises:
        InternalServerError: when the INSERT fails.
    """
    obj = None
    try:
        obj = SnowflakeAccessor()
        if inputCustomInfo:
            # NOTE(review): inputCustomInfo (a dict at the call site) is
            # interpolated as its Python repr inside to_variant(...) --
            # confirm the generated SQL is valid variant input for Snowflake.
            query = f"""
            INSERT INTO PROJECT_META_INFO(
              PROJECT, DISPLAY_NAME, DOMAIN, CATALOG_SUB_DOMAIN,
              WAREHOUSE, TIDB_HOST, TIDB_USERNAME, TIDB_PASSWORD,
              TIDB_CLUSTER_ID, ENABLE_DPL_API, ENABLE_CREATING_METRICS,
              CUSTOM_INPUT
            )
            SELECT
              '{project}', '{project_jp}', '{domain}', Null,
              '{snowWh}', '{host}', '{username}', '{password}',
              '{cluster_id}','{enableDplApi}','{enableCreatingMetrics}',
              to_variant({inputCustomInfo});
            """
        else:
            query = f"""
            INSERT
                INTO PROJECT_META_INFO
            VALUES(
                '{project}', '{project_jp}', '{domain}', Null,
                '{snowWh}', '{host}', '{username}', '{password}',
                '{cluster_id}', '{enableDplApi}', '{enableCreatingMetrics}',
                Null
            );
            """
        obj.execute(query)
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error: Insert metadata")
        raise InternalServerError
    finally:
        # Guard: obj is unbound if the accessor constructor itself failed.
        if obj is not None:
            obj.close()

    return

@retry(exceptions=(RetryException), tries=60, delay=5)
def _pollingCatalogAp(sub_domain: str):
    """Poll https://<sub_domain> until it answers HTTP 200.

    The @retry decorator re-invokes this up to 60 times at 5 s intervals;
    any connection failure or non-200 status triggers another attempt.

    Raises:
        RetryException: request failed or returned non-200 (consumed by
            @retry until the tries are exhausted).
    """
    url = f"https://{sub_domain}"
    logger.info(f"--- Polling: {url}")

    try:
        # Bound each attempt so a hung connection cannot stall the retry loop.
        ret = requests.get(url, timeout=10)
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        raise RetryException

    if ret.status_code != 200:
        logger.info("--- error:Polling")
        raise RetryException

    return

# Copy-environment deployment feature: main handler.
def postDeployCopyEnv(json: dict):
    """Provision a per-customer copy of the environment.

    Runs, in order: conflict check, Snowflake copy, ArangoDB copy,
    input DB/schema existence check, optional TiDB copy, metadata
    registration, optional DPB/DPL copy, Databricks copy, GitHub
    repository creation, Jenkins job copy, Catalog AP copy, and
    post-processing.

    Args:
        json: request payload with the project identifiers, Snowflake
            connection info, Azure AD client info, and feature flags
            (``enableDplApi`` / ``enableCreatingMetrics`` as the strings
            "true"/"false").

    Returns:
        ``{"message": "OK"}`` on success.

    Raises:
        Conflict: project already registered, or equals this deployment's
            own PROJECT.
        InternalServerError: any copy step failed.
    """
    # Extract request parameters.
    domain = json["domain"]
    project = json["project"]
    projectJP = json["projectJP"]
    snowUrl = json["snowUrl"].replace('https://', '')
    # NOTE(review): 'https://' was already stripped from snowUrl above, so
    # the second replace below is a no-op kept for safety.
    snowAccount = snowUrl.replace(
        'https://', '').replace('.snowflakecomputing.com', '')
    snowUser = json["snowUser"]
    snowPass = json["snowPass"]
    snowWh = json["snowWh"]
    clientId = json["clientId"]
    clientSecret = json["clientSecret"]
    tenantId = json["tenantId"]
    inputDB = json["inputDB"]
    inputSchema = json["inputSchema"]

    # If no warehouse was entered on screen, derive one from the naming rule.
    if snowWh == "":
        snowWh = f"IND_{project}_{ENV}_WH".upper()

    # Check the "DPL API enabled" flag.
    if json["enableDplApi"] == "true":
        enableDplApi = True
        enableCustomInput = False
        inputDB = f"{project}_{ENV}".upper()
    else:
        enableDplApi = False
        # Check whether a customer-specific DB/schema is used.
        if not inputDB:
            inputDB = f"{project}_{ENV}".upper()
            enableCustomInput = False
        else:
            enableCustomInput = True

    # Check the "metrics creation enabled" flag.
    if json["enableCreatingMetrics"] == "true":
        enableCreatingMetrics = True
    else:
        enableCreatingMetrics = False

    # Already-registered check.
    res = _conflictCheck(project)
    if res:
        raise Conflict
    if PROJECT == project:
        raise Conflict

    # Snowflake copy.
    try:
        copySnow(project, snowAccount, snowUser, snowPass, snowWh)
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:Snowflake copy")
        raise InternalServerError

    # ArangoDB copy.
    try:
        arangoUser = f"IND_{project}_{ENV}_USER".upper()
        arangoDb = f"IND_{project}_{ENV}_DB".upper()
        # Generate a random 8-character alphanumeric password.
        chars = string.ascii_letters + string.digits
        arangoPwd = ''.join(secrets.choice(chars) for x in range(8))
        copyArango(arangoUser, arangoPwd, arangoDb)
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:ArangoDB copy")
        raise InternalServerError

    # DB and schema existence check (only when the DPL API is disabled).
    if not enableDplApi:
        try:
            _dbAndSchemaExistsCheck(inputDB, inputSchema, snowAccount,
                                    snowUser, snowPass, snowWh)
        except Exception as e:
            raise e
        # customInputInfo is only defined on this path; enableCustomInput
        # can only be True inside this branch, so later use is safe.
        if enableCustomInput:
            customInputInfo = {
                "DB": inputDB,
                "SCHEMA": inputSchema
            }

    # TiDB copy.
    if enableCreatingMetrics:
        try:
            cluster_id, host, username, password = copyTiDB(project)
        except Exception as e:
            logger.error(f"{e}", exc_info=True)
            logger.info("---error:TiDB copy")
            raise InternalServerError
    else:
        cluster_id = ''
        host = ''
        username = ''
        password = ''

    # Register metadata.
    if enableCustomInput:
        _insertMetaInfo(project, projectJP, domain, snowWh,
                        cluster_id, host, username, password,
                        enableDplApi, enableCreatingMetrics,
                        customInputInfo
                        )
    else:
        _insertMetaInfo(project, projectJP, domain, snowWh,
                        cluster_id, host, username, password,
                        enableDplApi, enableCreatingMetrics,
                        )

    # DPB/DPL copy.
    if enableDplApi:
        try:
            copyDPB(project, enableCreatingMetrics)
        except Exception as e:
            logger.error(f"{e}", exc_info=True)
            logger.info("---error:DPB/DPL copy")
            raise InternalServerError

    # Databricks copy.
    try:
        res = copyDatabricks(project, snowUrl, snowUser,
                             snowPass, snowWh, inputDB)
        clusterId = res["cluster_id"]
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:Databricks copy")
        raise InternalServerError

    # GitHub copy, step 1: create the project repository.
    try:
        resultGithub = makeRepository(project=project,enableDplApi=enableDplApi,enableCustomInput=enableCustomInput,clusterId=clusterId,
                                       inputDB=inputDB, inputSchema=inputSchema)
        repoName = resultGithub["repoName"]
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info(f"---error:Github make repository:{e}")
        raise InternalServerError

    # Jenkins copy.
    try:
        res = copyJenkins(project)
        jobNames = res["job_names"]
        tmp = [item for item in jobNames if "metrics-CD" in item]
        jobNameCD = tmp[0]
        tmp = [item for item in jobNames if "metrics-CI" in item]
        jobNameCI = tmp[0]
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:Jenkins copy")
        raise InternalServerError

    # # GitHub copy, step 2 (disabled)
    # try:
    #     if enableCustomInput:
    #         resultGithub = copyMetrics(repoName, project, clusterId,
    #                                    inputDB, inputSchema)
    #     else:
    #         resultGithub = copyMetrics(repoName, project, clusterId, inputDB)
    # except Exception as e:
    #     logger.error(f"{e}", exc_info=True)
    #     logger.info("---error:Github make repository")
    #     raise InternalServerError

    # if enableDplApi:
    #     # pushTag
    #     try:
    #         pushTag(repoName)
    #     except Exception as e:
    #         logger.error(f"{e}", exc_info=True)
    #         logger.info("---error:Push Tag")
    #         raise InternalServerError

    # Catalog AP copy.
    try:
        resultCatalogAP = copyCatalogAP(project, snowAccount,
                                        snowUser, snowPass,
                                        snowWh, enableDplApi,
                                        enableCreatingMetrics,
                                        clientId, tenantId,
                                        clientSecret, arangoPwd)
        sub_domain = resultCatalogAP["sub_domain"]
    except Exception as e:
        logger.error(f"{e}", exc_info=True)
        logger.info("---error:CatalogAP copy")
        raise InternalServerError

    # Post-processing.
    _copyPostprocess(project, ENV, sub_domain, snowWh, inputDB, inputSchema, arangoDb)

    return {"message": "OK"}
kirin-ri commented 7 months ago
def copyJob2(
    job_name: str,
    project: str,
    databricks_token: str = None,
    databricks_url: str = None,
):
    """Clone the Jenkins job *job_name* into a project-specific job.

    Downloads the template job's config XML, rewrites the GitHub webhook
    repository, the Databricks URL and the credentials id for *project*,
    then registers the new job "<project>-<job_name>" on Jenkins.

    Returns:
        The name of the newly created Jenkins job.
    """
    import os

    logger.info("--- copy job start")
    # Connect to Jenkins.
    server = jenkins.Jenkins(f"http://{HOST}", username=USER, password=PWD)

    # Fetch the template job's config and cache it locally.
    logger.info("--- get jenkins job config")
    job_config = server.get_job_config(job_name)
    # Ensure the scratch directory exists; a missing dir made open() fail.
    os.makedirs("tmp_xml", exist_ok=True)
    tmp_job_xml = f"tmp_xml/{job_name}.xml"
    with open(tmp_job_xml, mode="w") as f:
        f.write(job_config)
    tree = ET.parse(tmp_job_xml)

    # Get the XML root and edit target texts found by tag name.
    root = tree.getroot()
    # Webhook target repository (swap in the project-specific repo name).
    logger.info("--- replace github url")
    for t in root.iter("url"):
        if isinstance(t.text, str):
            t.text = t.text.replace("scm-metrics", f"{project}-scm-metrics")
    if databricks_url:
        logger.info("--- replace databricks url")
        # Databricks endpoint (swap in the newly built workspace URL).
        for t in root.iter("command"):
            if isinstance(t.text, str):
                t.text = t.text.replace(DATABRICKS_URL, databricks_url)

    if databricks_token:
        logger.info("--- replace databricks token")
        # Databricks access token credential id (swap in the generated one).
        for t in root.iter("credentialsId"):
            if isinstance(t.text, str):
                t.text = t.text.replace("DB_DEV_TOKEN", f"{project}-token")

    new_job_name = f"{project}-{job_name}"
    new_job_xml = f"tmp_xml/{new_job_name}.xml"
    # Flush the edited tree to disk, then register it as a new Jenkins job.
    new_tree = ET.ElementTree(root)
    new_tree.write(new_job_xml, encoding="utf-8", xml_declaration=True)
    logger.info("--- create jenkins job")
    with open(new_job_xml, mode="r") as f:
        s = f.read()
        # Create the new job on Jenkins (names swapped to the customer's).
        server.create_job(new_job_name, s)
    logger.info("--- copy job end")
    return new_job_name
kirin-ri commented 7 months ago
def copyJob2(
    job_name: str,
    project: str,
    databricks_token: str = None,
    databricks_url: str = None,
):
    """Clone the Jenkins job *job_name* as "<project>-Catalog-AP-CD".

    This variant copies the template config XML verbatim (no content
    rewriting); *databricks_token* / *databricks_url* are accepted for
    signature compatibility with the other copyJob2 but are unused here.

    Returns:
        The name of the newly created Jenkins job.
    """
    import os

    logger.info("--- copy job start")
    # Connect to Jenkins.
    server = jenkins.Jenkins(f"http://{HOST}", username=USER, password=PWD)

    # Fetch the template job's config and cache it locally.
    logger.info("--- get jenkins job config")
    job_config = server.get_job_config(job_name)
    # Ensure the scratch directory exists; a missing dir made open() fail.
    os.makedirs("tmp_xml", exist_ok=True)
    tmp_job_xml = f"tmp_xml/{job_name}.xml"
    with open(tmp_job_xml, mode="w") as f:
        f.write(job_config)
    tree = ET.parse(tmp_job_xml)

    # Get the XML root.
    root = tree.getroot()

    new_job_name = f"{project}-Catalog-AP-CD"
    new_job_xml = f"tmp_xml/{new_job_name}.xml"
    # Flush the tree to disk, then register it as a new Jenkins job.
    new_tree = ET.ElementTree(root)
    new_tree.write(new_job_xml, encoding="utf-8", xml_declaration=True)
    logger.info("--- create jenkins job")
    with open(new_job_xml, mode="r") as f:
        s = f.read()
        # Create the new job on Jenkins (names swapped to the customer's).
        server.create_job(new_job_name, s)
    logger.info("--- copy job end")
    return new_job_name
kirin-ri commented 7 months ago
# Point the job at the 'main' branch: rewrite every Git BranchSpec name.
for branch_specifier in root.iter('branchSpecifiers'):  # NOTE(review): tag name 'branchSpecifiers' is assumed -- verify against the actual job XML
    for branch in branch_specifier.findall('hudson.plugins.git.BranchSpec'):
        name = branch.find('name')
        if name is not None:
            name.text = '*/main'  # watch the 'main' branch
kirin-ri commented 7 months ago
<project>
<actions/>
<description/>
<keepDependencies>false</keepDependencies>
<properties>
<org.datadog.jenkins.plugins.datadog.DatadogJobProperty plugin="datadog@5.5.0">
<enableFile>false</enableFile>
<tagFile/>
<enableProperty>false</enableProperty>
<tagProperties/>
<emitSCMEvents>true</emitSCMEvents>
</org.datadog.jenkins.plugins.datadog.DatadogJobProperty>
<jenkins.model.BuildDiscarderProperty>
<strategy class="hudson.tasks.LogRotator">
<daysToKeep>-1</daysToKeep>
<numToKeep>2</numToKeep>
<artifactDaysToKeep>-1</artifactDaysToKeep>
<artifactNumToKeep>-1</artifactNumToKeep>
</strategy>
</jenkins.model.BuildDiscarderProperty>
</properties>
<scm class="hudson.plugins.git.GitSCM" plugin="git@5.2.0">
<configVersion>2</configVersion>
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>git@github.com:qmonus-test/catalog-web-app.git</url>
</hudson.plugins.git.UserRemoteConfig>
</userRemoteConfigs>
<branches>
<hudson.plugins.git.BranchSpec>
<name>refs/tags/mock-cccc-*</name>
</hudson.plugins.git.BranchSpec>
</branches>
<doGenerateSubmoduleConfigurations>false</doGenerateSubmoduleConfigurations>
<submoduleCfg class="empty-list"/>
<extensions/>
</scm>
<canRoam>true</canRoam>
<disabled>false</disabled>
<blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
<blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
<triggers>
<org.jenkinsci.plugins.gwt.GenericTrigger plugin="generic-webhook-trigger@1.86.5">
<spec/>
<genericVariables>
<org.jenkinsci.plugins.gwt.GenericVariable>
<expressionType>JSONPath</expressionType>
<key>ref</key>
<value>$.ref</value>
<regexpFilter>refs/tags/</regexpFilter>
<defaultValue/>
</org.jenkinsci.plugins.gwt.GenericVariable>
</genericVariables>
<regexpFilterText>$ref</regexpFilterText>
<regexpFilterExpression>^(mock-cccc-.+)$</regexpFilterExpression>
<printPostContent>false</printPostContent>
<printContributedVariables>false</printContributedVariables>
<causeString>Generic Cause</causeString>
<token>tag-push</token>
<tokenCredentialId/>
<silentResponse>false</silentResponse>
<overrideQuietPeriod>false</overrideQuietPeriod>
<shouldNotFlattern>false</shouldNotFlattern>
<allowSeveralTriggersPerBuild>false</allowSeveralTriggersPerBuild>
</org.jenkinsci.plugins.gwt.GenericTrigger>
</triggers>
<concurrentBuild>false</concurrentBuild>
<builders>
<hudson.tasks.Shell>
<command>#!/bin/bash echo "タグの情報:$REF" # タグのpushを検知したらDockerイメージのビルドとリリースを行う if [[ "$REF" == mock-cccc-* ]]; then appName="catalog-web-app" setAppName="cccc-catalog-web-app" version=$(echo "$REF" | cut -d "-" -f 3) imageTag="idnaacr.azurecr.io/${appName}:$version" docker login "${ACR_LOGINSERVER}" -u "${CREDENTIALS_USR}" -p "${CREDENTIALS_PSW}" # ACR上に指定のタグのイメージが存在するか確認 if docker pull $imageTag > /dev/null 2>&1; then # イメージが存在する場合はAKSにセット echo "found" docker pull $imageTag kubectl set image deployment/$setAppName $setAppName=$imageTag else echo "ACR上に指定のタグのイメージが存在しません。デプロイをスキップします。" fi else echo "タグの情報が無効です。ビルドを終了します。" fi </command>
<configuredLocalRules/>
</hudson.tasks.Shell>
</builders>
<publishers/>
<buildWrappers/>
</project>
kirin-ri commented 7 months ago
import xml.etree.ElementTree as ET

# Load the XML file to edit (path is a placeholder).
xml_path = 'path_to_your_xml_file.xml'
tree = ET.parse(xml_path)
root = tree.getroot()

# Replace occurrences inside XML text content.
def replace_text(element):
    """Recursively replace 'cccc' with 'aaaa' in the text of *element*
    and of every descendant element."""
    content = element.text
    if content:
        element.text = content.replace('cccc', 'aaaa')
    for node in list(element):
        replace_text(node)

# Replace occurrences inside attribute values.
def replace_attributes(element):
    """Recursively replace 'cccc' with 'aaaa' in every attribute value of
    *element* and of its descendants."""
    for key, value in list(element.attrib.items()):
        element.set(key, value.replace('cccc', 'aaaa'))
    for node in list(element):
        replace_attributes(node)

# Apply both replacements to the whole tree.
replace_text(root)
replace_attributes(root)

# Save the modified XML document.
tree.write('modified_xml_file.xml')
kirin-ri commented 7 months ago
    # 确保临时 XML 目录存在
    tmp_xml_dir = "tmp_xml"
    os.makedirs(tmp_xml_dir, exist_ok=True)

    tmp_job_xml = f"{tmp_xml_dir}/{job_name}.xml"
    with open(tmp_job_xml, mode="w") as f:
        f.write(job_config)

    tree = ET.parse(tmp_job_xml)
    root = tree.getroot()

    # 在 XML 中替换 'cccc' 为 'aaaa'
    replace_in_element(root)
kirin-ri commented 7 months ago
kirin-ri commented 7 months ago
def replace_in_element(element):
    """Replace 'cccc' with 'aaaa' in the text and attribute values of
    *element* and every descendant, walking the tree iteratively."""
    pending = [element]
    while pending:
        node = pending.pop()
        if node.text:
            node.text = node.text.replace('cccc', 'aaaa')
        for key in list(node.attrib):
            node.attrib[key] = node.attrib[key].replace('cccc', 'aaaa')
        pending.extend(node)
kirin-ri commented 7 months ago
2024-04-09 03:58:52,084 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {"error_code":"RESOURCE_ALREADY_EXISTS","message":"Scope jenkins3 already exists!"}
2024-04-09 03:58:52,084 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- scopes create end
2024-04-09 03:58:52,084 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- secrets put start
2024-04-09 03:58:52,084 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- put secrets: sfUrl
2024-04-09 03:58:52,338 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  200
2024-04-09 03:58:52,339 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {}
2024-04-09 03:58:52,339 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- put secrets: sfUser
2024-04-09 03:58:52,408 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  200
2024-04-09 03:58:52,408 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {}
2024-04-09 03:58:52,408 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- put secrets: sfPassword
2024-04-09 03:58:52,472 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  200
2024-04-09 03:58:52,473 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {}
2024-04-09 03:58:52,473 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- put secrets: sfDatabase
2024-04-09 03:58:52,543 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  200
2024-04-09 03:58:52,543 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {}
2024-04-09 03:58:52,543 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- put secrets: sfWarehouse
2024-04-09 03:58:52,646 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  200
2024-04-09 03:58:52,647 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {}
2024-04-09 03:58:52,647 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- put secrets: sfRole
2024-04-09 03:58:52,721 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  200
2024-04-09 03:58:52,722 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  {}
2024-04-09 03:58:52,722 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- secrets put end
2024-04-09 03:58:52,722 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  ----- copy databricks end
2024-04-09 03:59:03,077 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  ----- copy Jenkins start
2024-04-09 03:59:03,078 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- copy job start
2024-04-09 03:59:03,078 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  --- get jenkins job config
2024-04-09 03:59:03,592 ERROR -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  replace_in_element() missing 1 required positional argument: 'project'
Traceback (most recent call last):
  File "/home/uenv/q_li/Desktop/i-dna-webapp/server/ap/postDeployCopyEnv.py", line 353, in postDeployCopyEnv
    res = copyJenkins(project)
  File "/home/uenv/q_li/Desktop/i-dna-webapp/server/tools/copyJenkins.py", line 164, in copyJenkins
    jn = copyJob2(project)
  File "/home/uenv/q_li/Desktop/i-dna-webapp/server/tools/copyJenkins.py", line 124, in copyJob2
    replace_in_element(root,project)
  File "/home/uenv/q_li/Desktop/i-dna-webapp/server/tools/copyJenkins.py", line 149, in replace_in_element
    replace_in_element(child)
TypeError: replace_in_element() missing 1 required positional argument: 'project'
2024-04-09 03:59:03,593 INFO -20240409-2d9a4700-dda9-4d3b-8987-edca2fcc682e  ---error:Jenkins copy
kirin-ri commented 7 months ago
echo "タグの情報:$REF"

# タグのpushを検知したらDockerイメージのビルドとリリースを行う
if [[ "$REF" == mock-jenkins3-* ]]; then
    appName="catalog-web-app"
    setAppName="jenkins3-catalog-web-app"
    version=$(echo "$REF" | cut -d "-" -f 3)
    imageTag="idnaacr.azurecr.io/${appName}:$version"
kirin-ri commented 7 months ago
echo "タグの情報:$REF"

# タグのpushを検知したらDockerイメージのビルドとリリースを行う
if [[ "$REF" == mock-jenkins3-* ]]; then
    appName="catalog-web-app"
    setAppName="jenkins3-catalog-web-app"
    version=$(echo "$REF" | cut -d "-" -f 3)
    imageTag="idnaacr.azurecr.io/${appName}:$version"
kirin-ri commented 7 months ago

version=$(echo "$REF" | cut -d "-" -f 3-)

kirin-ri commented 7 months ago

https://github.com/qmonus-test/jenkins/blob/main/job-shell/tmp-Catalog-AP-CD.xml

kirin-ri commented 7 months ago

raw.githubusercontent.com

kirin-ri commented 7 months ago
    new_job_name = f"{project}-Catalog-AP-CD"
    new_job_xml = f"tmp_xml/{new_job_name}.xml"
    # ElementTreeオブジェクトに変更を反映し,ファイルに書き出す
    new_tree = ET.ElementTree(root)
    new_tree.write(new_job_xml, encoding="utf-8", xml_declaration=True)
    logger.info("--- create jenkins job")
    with open(new_job_xml, mode="r") as f:
        s = f.read()
        # Jenkinsに新規ジョブを作成(個社名等に置き換え)
        server.create_job(new_job_name, s)
    logger.info("--- copy job end")
    return new_job_name

def replace_in_element(element, replace_test):
    """Recursively substitute the placeholder 'tmp' with *replace_test*
    in the text and attribute values of *element* and its descendants."""
    content = element.text
    if content:
        element.text = content.replace('tmp', replace_test)
    for key in list(element.attrib):
        element.attrib[key] = element.attrib[key].replace('tmp', replace_test)
    for node in list(element):
        replace_in_element(node, replace_test)
kirin-ri commented 7 months ago
import xml.etree.ElementTree as ET

def replace_in_element(element, old_text, new_text):
    """Walk *element* and its descendants, swapping every occurrence of
    *old_text* for *new_text* in both text content and attribute values."""
    current = element.text
    if current and old_text in current:
        element.text = current.replace(old_text, new_text)
    for key, value in list(element.attrib.items()):
        if old_text in value:
            element.attrib[key] = value.replace(old_text, new_text)
    for node in list(element):
        replace_in_element(node, old_text, new_text)

def create_and_update_job(project, stage):
    """Create a "<project>-Catalog-AP-CD" Jenkins job from a template XML,
    rewriting 'mock' to 'demo' when deploying to the 'stg' stage.

    NOTE(review): relies on module-level ``server`` (Jenkins client) and
    ``logger`` defined elsewhere, and on a placeholder template path --
    confirm both before use.
    """
    new_job_name = f"{project}-Catalog-AP-CD"
    new_job_xml = f"tmp_xml/{new_job_name}.xml"

    # Load the template XML and take its root (path shown is a placeholder).
    root = ET.parse('path/to/your/original/xml/file').getroot()

    # For the 'stg' stage, replace 'mock' with 'demo' throughout the tree.
    if stage == "stg":
        replace_in_element(root, 'mock', 'demo')

    # Flush the edited tree to a file.
    new_tree = ET.ElementTree(root)
    new_tree.write(new_job_xml, encoding="utf-8", xml_declaration=True)

    logger.info("--- create jenkins job")
    with open(new_job_xml, mode="r") as f:
        s = f.read()
        # Register the new job on Jenkins (names swapped to the customer's).
        server.create_job(new_job_name, s)
    logger.info("--- copy job end")
    return new_job_name
kirin-ri commented 7 months ago

N7n#tXz&LiCn

kirin-ri commented 7 months ago

aO8TA|bNdBeW

kirin-ri commented 7 months ago
def generate_field(field_props):
    """Build a marshmallow field from an OpenAPI-style property spec.

    Args:
        field_props: property definition with 'type' and the optional keys
            'pattern', 'required', and 'x-validations.byteLength.max'.

    Returns:
        A configured marshmallow field, or None for unsupported types.
    """
    field_type = field_props['type']
    validators = []
    if 'pattern' in field_props:
        validators.append(validate.Regexp(regex=field_props['pattern']))

    required = field_props.get('required', False)

    if 'x-validations' in field_props:
        max_length = field_props.get(
            'x-validations', {}).get('byteLength', {}).get('max')
        if max_length:
            validators.append(validate.Length(max=max_length))

    # byte/binary values arrive as (Base64-encoded) strings, and 'long'
    # is an int64 integer -- these types appear in this file's schema but
    # previously fell through and silently produced None.
    if field_type in ('string', 'byte', 'binary'):
        return fields.Str(validate=validators, required=required)
    elif field_type in ('integer', 'long'):
        return fields.Integer(validate=validators, required=required)
    elif field_type in ('number', 'float', 'double'):
        return fields.Float(validate=validators, required=required)
    elif field_type == 'boolean':
        return fields.Boolean(validate=validators, required=required)
    elif field_type == 'date':
        return fields.Date(validate=validators, required=required)
    elif field_type == 'dateTime':
        return fields.DateTime(validate=validators, required=required)
    # Unsupported type: return None explicitly so the caller can decide.
    return None
kirin-ri commented 7 months ago
        if type == "string":
            items[p_name] = {
                "type": type,
                "x-validations": {
                    "byteLength": {
                        "max": 255,
                    },
                },
            }

        elif type == "integer":
            items[p_name] = {
                "type": "integer",
                "format": "int32",
            }

        elif type == "long":
            items[p_name] = {
                "type": "integer",
                "format": "int64",
            }

        elif type == "float":
            items[p_name] = {
                "type": "number",
                "format": "float",
            }

        elif type == "double":
            items[p_name] = {
                "type": "number",
                "format": "double",
            }

        elif type == "byte":
            items[p_name] = {
                "type": "string",
                "format": "byte",
            }

        elif type == "binary":
            items[p_name] = {
                "type": "binary",
            }

        elif type == "boolean":
            items[p_name] = {
                "type": type,
            }

        elif type == "date":
            items[p_name] = {
                "type": "string",
                "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$",
            }

        elif type == "dateTime":
            items[p_name] = {
                "type": "string",
                "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$",
            }

        elif type == "password":
            items[p_name] = {
                "type": "string",
                "format": "password",
            }
kirin-ri commented 7 months ago
      - description: validationチェック用
        in: body
        name: bodyStr
        required: true
        schema:
          properties:
            data:
              properties:
                binary:
                  description: binary
                  required: true
                  type: binary
                boolean:
                  description: boolean
                  required: true
                  type: boolean
                byte:
                  description: byte
                  format: byte
                  required: true
                  type: string
                date:
                  description: date
                  pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$
                  required: true
                  type: string
                dateTime:
                  description: dateTime
                  pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$
                  required: true
                  type: string
                double:
                  description: double
                  format: double
                  required: true
                  type: number
                float:
                  description: float
                  format: float
                  required: true
                  type: number
                integer:
                  description: integer
                  format: int32
                  required: true
                  type: integer
                long:
                  description: long
                  format: int64
                  required: true
                  type: integer
                string:
                  description: string
                  required: true
                  type: string
                  x-validations:
                    byteLength:
                      max: 255
kirin-ri commented 7 months ago
from marshmallow import fields, validate, Schema

def generate_field(field_name, field_props):
    """Translate an OpenAPI-style property spec into a marshmallow field.

    Builds Regexp/Length validators from 'pattern' and
    'x-validations.byteLength.max', then dispatches on 'type'.
    Returns None when the type is not recognized.
    """
    required = field_props.get('required', False)

    checks = []
    # Regular-expression validation.
    if 'pattern' in field_props:
        checks.append(validate.Regexp(regex=field_props['pattern']))
    # Maximum-length validation.
    byte_max = field_props.get('x-validations', {}).get('byteLength', {}).get('max')
    if byte_max is not None:
        checks.append(validate.Length(max=byte_max))

    spec_type = field_props['type']
    if spec_type in ('string', 'byte'):
        return fields.String(validate=checks, required=required)
    if spec_type in ('integer', 'long'):
        # No dedicated long type in Python; Integer covers int64 too.
        return fields.Integer(validate=checks, required=required)
    if spec_type in ('number', 'float', 'double'):
        # Carried as strings to avoid float precision issues.
        return fields.Float(validate=checks, required=required, as_string=True)
    if spec_type == 'boolean':
        # NOTE: validators are intentionally not applied to booleans.
        return fields.Boolean(required=required)
    if spec_type == 'date':
        return fields.Date(validate=checks, required=required)
    if spec_type == 'dateTime':
        return fields.DateTime(validate=checks, required=required)
    if spec_type == 'binary':
        # Binary payloads are usually handled as Base64-encoded strings.
        return fields.String(validate=checks, required=required)

    return None
    return field