From fe3ec2b572906c9047e81c519aa4fe396ff41bbc Mon Sep 17 00:00:00 2001
From: "Ali(Ako) Hosseini"
Date: Fri, 19 Jul 2024 11:44:35 +0800
Subject: [PATCH 1/3] ci: test the values

---
 .github/actions/generate_sitemap_and_robots/action.yml | 2 ++
 .github/modify_robots.js                                | 4 ++--
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/.github/actions/generate_sitemap_and_robots/action.yml b/.github/actions/generate_sitemap_and_robots/action.yml
index 18569d4..a684e90 100644
--- a/.github/actions/generate_sitemap_and_robots/action.yml
+++ b/.github/actions/generate_sitemap_and_robots/action.yml
@@ -16,7 +16,9 @@ runs:
         domain_name=$(echo $MAIN_DOMAIN | sed 's|https://||g')
         mkdir -p content/$domain_name
         cp sitemap.xml content/$domain_name/sitemap.xml
+        cat sitemap.xml
         cp robots.txt content/$domain_name/robots.txt
         node .github/modify_sitemap.js --new-domain $domain_name --input-file content/$domain_name/sitemap.xml
         node .github/modify_robots.js --sitemap-url $domain_name --input-file content/$domain_name/robots.txt
+        cat content/$domain_name/sitemap.xml
         rm sitemap.xml robots.txt
diff --git a/.github/modify_robots.js b/.github/modify_robots.js
index 686baa7..aaa84b4 100644
--- a/.github/modify_robots.js
+++ b/.github/modify_robots.js
@@ -13,11 +13,11 @@ function updateSitemap(inputFile, newSitemapUrl) {
     if (data.match(/^Sitemap:/m)) {
       newContent = data.replace(
         /^Sitemap: .*/m,
-        `Sitemap: https://seo.deriv.com/${newSitemapUrl}/sitemap.xml`
+        `Sitemap: https://urls.deriv.com/sitemap.xml`
       );
     } else {
       newContent =
-        data + `\nSitemap: https://seo.deriv.com/${newSitemapUrl}/sitemap.xml`;
+        data + `\nSitemap: https://urls.deriv.com/sitemap.xml`;
     }
     if (data.match(/^Host:/m)) {
       newContent = newContent.replace(/^Host: .*/m, `Host: https://${newSitemapUrl}`);

From ecdc7982a6130eacf084af7230a15bde9c5a2822 Mon Sep 17 00:00:00 2001
From: "Ali(Ako) Hosseini"
Date: Fri, 19 Jul 2024 13:49:38 +0800
Subject: [PATCH 2/3] ci: replace urls in original sitemap file

---
 .../action.yml                          | 21 ++++++++++---------
 .github/actions/upload_to_r2/action.yml |  5 ++++-
 .github/workflows/generate_sitemap.yml  | 13 ++++--------
 .github/workflows/publish-website.yml   | 13 ++++--------
 4 files changed, 23 insertions(+), 29 deletions(-)
 rename .github/actions/{generate_sitemap_and_robots => generate_sitemap}/action.yml (50%)

diff --git a/.github/actions/generate_sitemap_and_robots/action.yml b/.github/actions/generate_sitemap/action.yml
similarity index 50%
rename from .github/actions/generate_sitemap_and_robots/action.yml
rename to .github/actions/generate_sitemap/action.yml
index a684e90..79d1369 100644
--- a/.github/actions/generate_sitemap_and_robots/action.yml
+++ b/.github/actions/generate_sitemap/action.yml
@@ -1,24 +1,25 @@
-name: "Generate Sitemap.xml and Robots.txt"
+name: "Generate Sitemap.xml"
 description: "Generate Sitemap.xml and robots.txt for all domains and upload to R2"
 inputs:
   main_domain:
     description: "Main domain for sitemap replacement"
     required: true
+  base_domain:
+    description: "Base domain for sitemap download"
+    required: true
 runs:
   using: "composite"
   steps:
+    - name: Download sitemap.xml
+      shell: bash
+      env:
+        BASE_DOMAIN: ${{ inputs.base_domain }}
+      run: curl --create-dirs -O --output-dir ./content -O $BASE_DOMAIN/sitemap.xml
+
     - name: Replace sitemap URLs and create directories
       shell: bash
       env:
         MAIN_DOMAIN: ${{ inputs.main_domain }}
       run: |
-        mkdir content
         domain_name=$(echo $MAIN_DOMAIN | sed 's|https://||g')
-        mkdir -p content/$domain_name
-        cp sitemap.xml content/$domain_name/sitemap.xml
-        cat sitemap.xml
-        cp robots.txt content/$domain_name/robots.txt
-        node .github/modify_sitemap.js --new-domain $domain_name --input-file content/$domain_name/sitemap.xml
-        node .github/modify_robots.js --sitemap-url $domain_name --input-file content/$domain_name/robots.txt
-        cat content/$domain_name/sitemap.xml
-        rm sitemap.xml robots.txt
+        node .github/modify_sitemap.js --new-domain $domain_name --input-file ./content/sitemap.xml
diff --git a/.github/actions/upload_to_r2/action.yml b/.github/actions/upload_to_r2/action.yml
index a04fcd1..5a84fb4 100644
--- a/.github/actions/upload_to_r2/action.yml
+++ b/.github/actions/upload_to_r2/action.yml
@@ -13,6 +13,9 @@ inputs:
   r2_bucket_name:
     description: 'R2 Bucket Name'
     required: true
+  source_dir:
+    description: 'Source directory to upload'
+    required: true
 runs:
   using: 'composite'
   steps:
@@ -23,5 +26,5 @@ runs:
         r2-access-key-id: ${{ inputs.r2_access_key_id }}
         r2-secret-access-key: ${{ inputs.r2_secret_access_key }}
         r2-bucket: ${{ inputs.r2_bucket_name }}
-        source-dir: ./content/deriv.com
+        source-dir: ${{ inputs.source_dir }}
         destination-dir: .
diff --git a/.github/workflows/generate_sitemap.yml b/.github/workflows/generate_sitemap.yml
index b826353..fd91a06 100644
--- a/.github/workflows/generate_sitemap.yml
+++ b/.github/workflows/generate_sitemap.yml
@@ -19,17 +19,11 @@ jobs:
         shell: bash
         run: npm install yargs
 
-      - name: Download sitemap.xml and robots.txt
-        shell: bash
-        env:
-          BASE_DOMAIN: "https://staging.deriv.com"
-        run: |
-          curl -O $BASE_DOMAIN/sitemap.xml -O $BASE_DOMAIN/robots.txt
-
-      - name: Generate Sitemap and Robots.txt
-        uses: ./.github/actions/generate_sitemap_and_robots
+      - name: Generate Sitemap
+        uses: ./.github/actions/generate_sitemap
         with:
           main_domain: "https://deriv.com"
+          base_domain: "https://staging.deriv.com"
 
       - name: Upload to R2
         uses: ./.github/actions/upload_to_r2
@@ -38,3 +32,4 @@
           r2_access_key_id: ${{ secrets.R2_ACCESS_KEY_ID }}
           r2_secret_access_key: ${{ secrets.R2_SECRET_ACCESS_KEY }}
           r2_bucket_name: ${{ secrets.R2_BUCKET_NAME }}
+          source_dir: "./content"
diff --git a/.github/workflows/publish-website.yml b/.github/workflows/publish-website.yml
index 33cd944..52eac45 100644
--- a/.github/workflows/publish-website.yml
+++ b/.github/workflows/publish-website.yml
@@ -90,17 +90,11 @@ jobs:
         shell: bash
         run: npm install yargs
 
-      - name: Download sitemap.xml and robots.txt
-        shell: bash
-        env:
-          BASE_DOMAIN: "https://staging.deriv.com"
-        run: |
-          curl -O $BASE_DOMAIN/sitemap.xml -O $BASE_DOMAIN/robots.txt
-
-      - name: Generate Sitemap and Robots.txt
-        uses: ./.github/actions/generate_sitemap_and_robots
+      - name: Generate Sitemap
+        uses: ./.github/actions/generate_sitemap
         with:
           main_domain: "https://deriv.com"
+          base_domain: "https://staging.deriv.com"
 
       - name: Upload to R2
         uses: ./.github/actions/upload_to_r2
@@ -109,6 +103,7 @@
           r2_access_key_id: ${{ secrets.R2_ACCESS_KEY_ID }}
           r2_secret_access_key: ${{ secrets.R2_SECRET_ACCESS_KEY }}
           r2_bucket_name: ${{ secrets.R2_BUCKET_NAME }}
+          source_dir: "./content"
 
   send_slack_failure:
     runs-on: ubuntu-latest

From 27319bc51e0b3af3e3b6e6dea170f1a320493624 Mon Sep 17 00:00:00 2001
From: "Ali(Ako) Hosseini"
Date: Fri, 19 Jul 2024 14:19:24 +0800
Subject: [PATCH 3/3] fix: change the file encoding

---
 .github/modify_robots.js  | 4 ++--
 .github/modify_sitemap.js | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/modify_robots.js b/.github/modify_robots.js
index aaa84b4..39ccc3e 100644
--- a/.github/modify_robots.js
+++ b/.github/modify_robots.js
@@ -3,7 +3,7 @@ const yargs = require("yargs");
 
 // Function to update Sitemap entry in robots.txt
 function updateSitemap(inputFile, newSitemapUrl) {
-  fs.readFile(inputFile, "utf8", (err, data) => {
+  fs.readFile(inputFile, "utf-8", (err, data) => {
     if (err) {
       console.error("Error reading the file:", err);
       return;
@@ -22,7 +22,7 @@ function updateSitemap(inputFile, newSitemapUrl) {
     if (data.match(/^Host:/m)) {
       newContent = newContent.replace(/^Host: .*/m, `Host: https://${newSitemapUrl}`);
     }
-    fs.writeFile(inputFile, newContent, "utf8", (err) => {
+    fs.writeFile(inputFile, newContent, "utf-8", (err) => {
       if (err) {
         console.error("Error writing the file:", err);
         return;
diff --git a/.github/modify_sitemap.js b/.github/modify_sitemap.js
index c46bd2a..ee3cf8f 100644
--- a/.github/modify_sitemap.js
+++ b/.github/modify_sitemap.js
@@ -24,7 +24,7 @@ const newDomain = argv["new-domain"];
 const inputFile = argv["input-file"]; // Define the new domain
 
 // Read the input file
-fs.readFile(inputFile, "utf8", (err, data) => {
+fs.readFile(inputFile, "utf-8", (err, data) => {
   if (err) {
     console.error("Error reading the file:", err);
     return;
@@ -37,7 +37,7 @@ fs.readFile(inputFile, "utf8", (err, data) => {
   const newContent = data.replace(pattern, `https://${newDomain}`);
 
   // Write the modified content to the output file
-  fs.writeFile(inputFile, newContent, "utf8", (err) => {
+  fs.writeFile(inputFile, newContent, "utf-8", (err) => {
     if (err) {
       console.error("Error writing the file:", err);
       return;
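
Not part of the series: a rough local dry run of the flow that patch 2 wires into the composite action might look like the sketch below. The staging URL, target domain, and the grep check are illustrative assumptions, and the curl invocation assumes a curl new enough (7.73+) to support --output-dir.

    # modify_sitemap.js depends on yargs, which the workflows install first
    npm install yargs
    # fetch the source sitemap into ./content, as the Download step does
    curl --create-dirs -O --output-dir ./content https://staging.deriv.com/sitemap.xml
    # rewrite the URLs in place to the production domain, as the Replace step does
    node .github/modify_sitemap.js --new-domain deriv.com --input-file ./content/sitemap.xml
    # spot-check: entries should now reference the target domain
    # (assuming the script's URL pattern matches the staging entries)
    grep -c "https://deriv.com" ./content/sitemap.xml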