This is done by parsing the HTML of the given sites and reacting when the sorted index listings on those sites change. The links are then updated, and a PR with the new links is created. The bot is run once a week on a schedule.

Changelog: None
Signed-off-by: Ole Petter <ole.orhagen@northern.tech>
11 changed files with 2000 additions and 1 deletion
@@ -0,0 +1,33 @@
name: automatically-update-test-links
on:
  schedule:
    - cron: "0 13 * * 1"
jobs:
  createPullRequest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-node@v2
      - name: Make changes to pull request
        run: |
          npm ci
          node monitor-bb.js
          node monitor-bb-sd.js
          node monitor-rpi.js
          node monitor-tinker.js
          node monitor-ub-server.js
        working-directory: scripts/linkbot

      - name: Create Pull Request
        id: cpr
        uses: peter-evans/create-pull-request@v3
        with:
          title: "Image-Bot: New images available"
          body: |
            New images are available for update
          commit-message: Image-Bot - Image updates

      - name: Check outputs
        run: |
          echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}"
          echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
@@ -0,0 +1 @@
node_modules
@@ -0,0 +1,18 @@
const fs = require('fs')
const updateURLLink = (newLine, target) => {
    try {
        const data = fs.readFileSync('../test/run-tests.sh', 'utf8')
              .replace(RegExp(`## Auto-update\n${target}=.*`), `## Auto-update\n${newLine}`)
        fs.writeFile('../test/run-tests.sh', data, (err, data) => {
            if (err) {
                console.error(err)
            }
        })
    } catch (err) {
        console.error(err)
        process.exit(1)
    }
}
module.exports = {
    updateURLLink
}
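A minimal sketch of what this helper assumes, inferred from the regex above: run-tests.sh carries a "## Auto-update" marker comment directly above each managed variable, and the assignment line below the marker is what gets rewritten. The URLs here are made up for illustration.

// Illustration only, with hypothetical URLs: updateURLLink's replace() swaps the
// assignment line that follows the "## Auto-update" marker for the new one.
const before = '## Auto-update\nBBB_DEBIAN_SDCARD_IMAGE_URL="https://old.example/image.img.xz"'
const after = before.replace(
    RegExp('## Auto-update\nBBB_DEBIAN_SDCARD_IMAGE_URL=.*'),
    '## Auto-update\nBBB_DEBIAN_SDCARD_IMAGE_URL="https://new.example/image.img.xz"')
console.log(after) // prints the marker followed by the new assignment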
@@ -0,0 +1,49 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
const fs = require('fs')
const { updateURLLink } = require('./common');

const target = "BBB_DEBIAN_SDCARD_IMAGE_URL"

var reg = "bone-debian-(?<version>[0-9]+\.[0-9]+)-iot-armhf-(?<date>[0-9]{4}-[0-9]{2}-[0-9]{1,2})-4gb.img.xz$"

// Read the input file, and parse the variable input
try {
    const data = fs.readFileSync('../test/run-tests.sh', 'utf8')
          .split('\n')
          .filter(line => line.match(`${target}=.*`))
    var line = data[0]
    var m = line.match(".*=\"(?<url>[a-zA-Z-://\._]*)(?<imageName>bone-debian-(?<version>[0-9]+\.[0-9]+)-iot-armhf-(?<date>[0-9]{4}-[0-9]{2}-[0-9]{1,2})-4gb.img.xz)")
    var url = m.groups.url
    var currentImageName = m.groups.imageName
} catch (err) {
    console.error(err)
    process.exit(1)
}

JSDOM.fromURL(url, {}).then(dom => {
    var document = dom.window.document;
    var table = document.getElementById("list");
    var rows = table.rows;
    var matches = Array.from(rows)
          .filter(row => row.firstChild.textContent.match(reg))
          .reduce((acc, element) => {
              var regMatch = element.firstChild.textContent.match(reg)
              acc.push({
                  text: element.firstChild.textContent,
                  version: regMatch.groups.version,
                  date: regMatch.groups.date,
              })
              return acc
          }, [])
          .sort((a,b) => {
              // The bone-debian image name has two parts which need comparing:
              // * The release version, e.g., 10.3
              // * The date, e.g., 2020-04-06
              return parseFloat(b.version) - parseFloat(a.version) || Date.parse(b.date) - Date.parse(a.date)
          })
    if (matches[0].text !== currentImageName) {
        console.error("We've got a new release! \\o/");
        updateURLLink(`${target}=\"${url}/${matches[0].text}\"`, target)
    }
});
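To make the comparator above concrete, here is a small stand-alone sketch. The 10.3 entry mirrors the example values in the comments; the 9.9 entry and its date are hypothetical. The point is that the release version outranks the build date.

// Hypothetical entries: the newer version wins even when an older version has a later date.
const entries = [
    { text: "bone-debian-9.9-iot-armhf-2021-01-01-4gb.img.xz", version: "9.9", date: "2021-01-01" },
    { text: "bone-debian-10.3-iot-armhf-2020-04-06-4gb.img.xz", version: "10.3", date: "2020-04-06" },
]
entries.sort((a, b) =>
    parseFloat(b.version) - parseFloat(a.version) || Date.parse(b.date) - Date.parse(a.date))
console.log(entries[0].text) // => the 10.3 image sorts first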
@@ -0,0 +1,58 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
const fs = require('fs')
const { updateURLLink } = require('./common');

const reg = "[0-9]{4}-[0-9]{2}-[0-9]{1,2}/"
const target = "BBB_DEBIAN_EMMC_IMAGE_URL"

// Read the input file, and parse the variable input
try {
    const data = fs.readFileSync('../test/run-tests.sh', 'utf8')
          .split('\n')
          .filter(line => line.match(`${target}=.*`))
    var line = data[0]
    var m = line.match(".*=\"(?<url>[a-zA-Z-://\.]*)(?<latestDate>[0-9]{4}-[0-9]{2}-[0-9]{1,2}/).*")
    var url = m.groups.url
    var latestDate = m.groups.latestDate
} catch (err) {
    console.error(err)
    process.exit(1)
}

function getNewBoneDebian(url) {
    return JSDOM.fromURL(url, {}).then(dom => {
        var document = dom.window.document;
        var refs = document.getElementsByTagName("a");
        var test = Array.from(refs)
              .filter(ref => ref.textContent.match(("bone-debian.*\.img\.xz$")))
              .reduce((acc, element) => {
                  acc.push(element.textContent.match("bone-debian.*\.img\.xz$").input)
                  return acc
              }, [])[0]
        return `${target}=\"${url}/${test}\"`;
    });
}

JSDOM.fromURL(url, {}).then(async dom => {
    var document = dom.window.document;
    var table = document.getElementsByTagName("table");
    var rows = table[0].rows;
    var matches = Array.from(rows)
          .filter(row => row.children.length == 5)
          .filter(row => row.children[1].textContent.match(reg))
          .reduce((acc, row) => {
              acc.push(row.children[1].textContent)
              return acc
          }, [])
          .sort((a,b) => {
              return Date.parse(b) - Date.parse(a)
          })
    if (matches[0] !== latestDate) {
        console.error("We've got a new release! \\o/");
        var newVar = await getNewBoneDebian(`${url}${matches[0]}buster-console`)
        if (newVar) {
            updateURLLink(newVar, target)
        }
    }
});
@@ -0,0 +1,54 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
const fs = require('fs')
const { updateURLLink } = require('./common');

const target = "RASPBIAN_IMAGE_URL"

// Read the input file, and parse the variable input
try {
    const data = fs.readFileSync('../test/run-tests.sh', 'utf8')
          .split('\n')
          .filter(line => line.match(`${target}=.*`))
    var line = data[0]
    console.log(line)
    var reg = "raspbian_lite-(?<date>[0-9]{4}-[0-9]{2}-[0-9]{1,2})/(?<updated>[0-9]{4}-[0-9]{2}-[0-9]{1,2}).*$"
    var m = line.match(".*=\"(?<url>[a-zA-Z-://\._]*)(?<imageName>raspbian_lite-[0-9]{4}-[0-9]{2}-[0-9]{1,2})/(?<updated>[0-9]{4}-[0-9]{2}-[0-9]{1,2}).*$")
    console.log(m)
    var url = m.groups.url
    var imageName = m.groups.imageName
    var updated = m.groups.updated
} catch (err) {
    console.error(err)
    process.exit(1)
}

JSDOM.fromURL(url, {}).then(dom => {
    var document = dom.window.document;
    var table = document.getElementsByTagName("table");
    var rows = table[0].rows;
    var matches = [];
    for (var i = 0; i < rows.length; i++) {
        var rowText = rows[i].textContent;
        var regMatch = rowText.match(reg);
        if (regMatch) {
            matches.push(regMatch);
        }
    }
    // Sort the accumulated matches
    matches.sort(function(a,b) {
        let al = Date.parse(a.groups.date);
        let bl = Date.parse(b.groups.date);
        if (al == bl) {
            let ad = Date.parse(a.groups.updated);
            let bd = Date.parse(b.groups.updated);
            return bd - ad;
        }
        return bl - al;
    });
    var matchOn = matches[0].input.split("/")[0]
    if (matchOn !== imageName) {
        console.error("We've got a new release! \\o/");
        updateURLLink(`${target}=\"${url}${matches[0].input.split(" ")[0]}-raspbian-buster-lite.zip\"`, target)
    }
});
@@ -0,0 +1,52 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
const bent = require('bent')
const getJSON = bent('json')
const fs = require('fs')
const { updateURLLink } = require('./common');

const target = "TINKER_IMAGE_URL"
const url = "https://tinker-board.asus.com/download-list.html?product=tinker-board"
let versionRegexp = "[vV](?<major>[0-9]{1,2})\.(?<minor>[0-9]{1,2})\.(?<patch>[0-9]{1,2})"
const reg = ".*[Vv](?<major>[0-9]{1,2})\.(?<minor>[0-9]{1,2})\.(?<patch>[0-9]{1,2})?.*"

// Read the input file, and parse the variable input
try {
    const data = fs.readFileSync('../test/run-tests.sh', 'utf8')
          .split('\n')
          .filter(line => line.match(`${target}=.*`))
    var line = data[0]
    var m = line.match(`.*=\"${reg}`)
    console.log(m)
    var major = m.groups.major
    var minor = m.groups.minor
    var patch = m.groups.patch || 0
} catch (err) {
    console.error(err)
    process.exit(1)
}

let obj = getJSON("https://www.asus.com/support/api/product.asmx/GetPDDrivers?cpu=&osid=8&website=global&pdhashedid=xOd5XdS4L5c6tt1O&model=Tinker%20Board%20S").then(result => {
    result.Result.Obj[0].Files.push({
        Title: "TinkerOS_Debian",
        Version: `V${major}.${minor}.${patch}`,
    })
    let matches = result.Result.Obj[0].Files.filter(obj => obj.Title.match("TinkerOS_Debian"))
        .sort((a,b) => {
            let matchA = a.Version.match(versionRegexp)
            let matchB = b.Version.match(versionRegexp)
            if (matchA && matchB) {
                return parseInt(matchB.groups.major) - parseInt(matchA.groups.major) ||
                    parseInt(matchB.groups.minor) - parseInt(matchA.groups.minor) ||
                    parseInt(matchB.groups.patch) - parseInt(matchA.groups.patch)
            }
        })
    console.log("matches")
    console.log(matches)

    // New version
    if (matches[0].DownloadUrl) {
        console.log(`${target}=${matches[0].DownloadUrl.Global}`)
        updateURLLink(`${target}=\"${matches[0].DownloadUrl.Global}\"`, target)
    }
})
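A note on the sorting trick in the script above: the currently configured version from run-tests.sh is pushed into the fetched file list before sorting. That local entry has no DownloadUrl, so when it still sorts first, the matches[0].DownloadUrl check fails and no update is made. A small sketch with made-up versions and a hypothetical download URL:

// Hypothetical versions: the locally configured entry (no DownloadUrl) is appended,
// and an update only happens when a fetched entry sorts ahead of it.
const files = [{ Title: "TinkerOS_Debian", Version: "V2.0.11", DownloadUrl: { Global: "https://example/new.zip" } }]
files.push({ Title: "TinkerOS_Debian", Version: "V2.0.8" }) // current local version
files.sort((a, b) => {
    const [A, B] = [a, b].map(x => x.Version.match("[vV](?<major>[0-9]+)\.(?<minor>[0-9]+)\.(?<patch>[0-9]+)").groups)
    return (B.major - A.major) || (B.minor - A.minor) || (B.patch - A.patch)
})
console.log(Boolean(files[0].DownloadUrl)) // true => V2.0.11 is newer, so an update would be made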
@@ -0,0 +1,46 @@
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
const fs = require('fs')
const { updateURLLink } = require('./common');

const url = "http://cdimage.ubuntu.com/ubuntu/releases/"
const reg = ".*(?<release>[0-9]{2})\.04\.?(?<minor>[0-9]{1})?.*"

// Read the input file, and parse the variable input
try {
    const data = fs.readFileSync('../test/run-tests.sh', 'utf8')
          .split('\n')
          .filter(line => line.match("UBUNTU_SERVER_RPI_IMAGE_URL=.*"))
    var line = data[0]
    var m = line.match(`.*=\"${reg}\"`)
    var imageName = m.groups.release
    var minor = m.groups.minor || 0
} catch (err) {
    console.error(err)
    process.exit(1)
}

JSDOM.fromURL(url, {}).then(dom => {
    var document = dom.window.document;
    var refs = document.getElementsByTagName("a");
    var matches = Array.from(refs)
          .filter(ref => ref.textContent.match(reg))
          .reduce((acc, ref) => {
              acc.push(ref.textContent.match(reg))
              return acc
          }, [])
          .sort((a,b) => {
              return parseInt(b.groups.release) - parseInt(a.groups.release) || parseFloat(b.groups.minor) - parseFloat(a.groups.minor)
          })
    var matchOn = matches[0].input
    if (matchOn !== imageName) {
        console.log("We've got a new release! \\o/");
        var newLine = ""
        if (matches[0].groups.minor) {
            newLine = `UBUNTU_SERVER_RPI_IMAGE_URL=\"${url}${matches[0].groups.release}.04.${matches[0].groups.minor}/release/ubuntu-${matches[0].groups.release}.04.${matches[0].groups.minor}-preinstalled-server-armhf+raspi.img.xz\"`
        } else {
            newLine = `UBUNTU_SERVER_RPI_IMAGE_URL=\"${url}${matches[0].groups.release}.04/release/ubuntu-${matches[0].groups.release}.04-preinstalled-server-armhf+raspi.img.xz\"`
        }
        updateURLLink(newLine, "UBUNTU_SERVER_RPI_IMAGE_URL")
    }
});
File diff suppressed because it is too large
@@ -0,0 +1,6 @@
{
  "dependencies": {
    "bent": "^7.3.12",
    "jsdom": "^16.4.0"
  }
}