Add build script

Download all the jsDelivr assets, and use Parcel to bundle them
Hans5958 2022-04-28 13:10:22 +00:00 committed by GitHub
parent b92bb35407
commit 8d5f3c8bab
5 changed files with 97 additions and 1 deletion

9
.gitignore vendored

@@ -14,4 +14,11 @@ combined.js
.vscode/
web/atlas-before-ids-migration.json
*.pyc
tools/read-ids-temp.txt
tools/read-ids-temp.txt
.venv/
node_modules/
dist*/
package.json
package-lock.json
yarn.lock
.parcel-cache/

3
netlify.toml Normal file

@@ -0,0 +1,3 @@
[build]
publish = "dist/"
command = "FILE=tools/ci/build-prod.sh; rm -rf dist/; if [ -f $FILE ]; then bash $FILE; else cp -r web/ dist/; fi"
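For readability, the one-line build command above expands to roughly the following (a sketch of the same logic, using the paths from this commit):

FILE=tools/ci/build-prod.sh
rm -rf dist/                 # always start from a clean publish directory
if [ -f "$FILE" ]; then
    bash "$FILE"             # run the full production build when the script exists
else
    cp -r web/ dist/         # fallback: publish the raw web/ directory unchanged
fi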

16
tools/ci/build-prod.sh Normal file

@@ -0,0 +1,16 @@
# This script should only be run in a CI/Netlify environment!
# If you really want to run it yourself, run it from the repository root.
rm -rf dist-temp
rm -rf dist
cp -r web/ dist-temp/
cp tools/ci/postcss.config.js ./
cp tools/ci/package.json ./
npm i
python tools/ci/cdn-to-local.py
npx parcel build dist-temp/index.html dist-temp/**.html --dist-dir "dist" --no-source-maps --no-content-hash
mkdir dist/_img
cp -r dist-temp/_img/canvas/ dist/_img/canvas/
cp dist-temp/atlas.json dist
rm -rf dist-temp
rm -rf postcss.config.js
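
Per its own comments the script targets CI, but a minimal local run (assuming Node.js, npm, and Python are installed) would look like:

cd path/to/repository-root   # the script resolves all paths from the root
bash tools/ci/build-prod.sh
ls dist/                     # the bundled site ends up in dist/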

48
tools/ci/cdn-to-local.py Normal file

@@ -0,0 +1,48 @@
import glob
import re
import hashlib
import os
import urllib.request

cdns = []

def join_rel_path(path1, path2):
    # Join a base path with a relative path, collapsing "parent/." segments.
    path = os.path.join(path1, path2)
    path = re.sub(r"\/[^\/]+?\/\.", "", path)
    return path

# First pass: download every jsDelivr asset referenced by the HTML pages
# into dist-temp/cdn/<md5-of-url>/ and rewrite the references to point there.
for name in glob.glob("web/**.html"):
    with open(name, 'r', encoding='utf-8') as file:
        file_string = file.read()
    urls = re.findall(r'"(https:\/\/cdn\.jsdelivr\.net\/(.+?))"', file_string)
    for url_groups in urls:
        url: str = url_groups[0]
        os.makedirs("dist-temp/cdn/" + hashlib.md5(url.encode()).hexdigest(), exist_ok=True)
        new_url = "cdn/" + hashlib.md5(url.encode()).hexdigest() + "/" + os.path.basename(url)
        print(url)
        urllib.request.urlretrieve(url, "dist-temp/" + new_url)
        file_string = file_string.replace(url, new_url)
        cdns.append((url, new_url, hashlib.md5(url.encode()).hexdigest()))
    # print(file_string).replace("\?.+$", "")
    name = name.replace('web/', 'dist-temp/')
    with open(name, 'w', encoding='utf-8') as file:
        file.write(file_string)

# Second pass: downloaded files may themselves reference sibling assets with
# relative "./..." URLs; fetch those too and rewrite the references.
for cdn in cdns:
    parent_url, parent_new_url, hash = cdn
    name = "dist-temp/" + parent_new_url
    with open(name, 'r', encoding='utf-8') as file:
        file_string = file.read()
    urls = re.findall(r'\("(.\/(.+?))"\)', file_string)
    for url_groups in urls:
        url_orig = url_groups[0]
        url: str = join_rel_path(parent_url, url_groups[0])
        url = re.sub(r"\?.+$", "", url)
        os.makedirs("dist-temp/cdn/" + hashlib.md5(url.encode()).hexdigest(), exist_ok=True)
        new_url = "cdn/" + hashlib.md5(url.encode()).hexdigest() + "/" + os.path.basename(url)
        print(url)
        urllib.request.urlretrieve(url, "dist-temp/" + new_url)
        file_string = file_string.replace(url_orig, new_url.replace("cdn/", "../"))
    with open(name, 'w', encoding='utf-8') as file:
        file.write(file_string)
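
To illustrate the path mapping the script performs, its hashing step is equivalent to the following shell sketch (the URL is a made-up example, not one taken from the repository):

URL="https://cdn.jsdelivr.net/npm/example-lib@1.0.0/dist/example.min.js"
HASH=$(printf '%s' "$URL" | md5sum | cut -d' ' -f1)   # same digest as hashlib.md5(url.encode()).hexdigest()
echo "cdn/$HASH/$(basename "$URL")"                   # asset is saved under dist-temp/cdn/<hash>/example.min.js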

22
tools/ci/postcss.config.js Normal file

@@ -0,0 +1,22 @@
const purgecss = require("@fullhuman/postcss-purgecss");

const plugins = [];

if (process.env.NODE_ENV === "production") {
    plugins.push(
        purgecss({
            content: [
                './dist-temp/*.html',
                './dist-temp/**/*.html',
                './dist-temp/*.js',
                './dist-temp/**/*.js',
                './dist-temp/*.svg',
                './dist-temp/**/*.svg'
            ]
        })
    );
}

module.exports = {
    plugins: plugins
};
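
PurgeCSS only runs when NODE_ENV is "production". Parcel's build command normally sets this itself, but it can also be forced explicitly when testing the config, e.g.:

NODE_ENV=production npx parcel build dist-temp/index.html --dist-dir dist --no-source-maps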