posts and basic latex

This commit is contained in:
Rachel Lambda Samuelsson 2024-07-16 01:14:04 +02:00
parent 2cfc466efd
commit 9d8395b7b4
17 changed files with 1469 additions and 17 deletions

13
env.d.ts vendored
View File

@ -1,2 +1,15 @@
/// <reference types="vite/client" />
/// <reference types="vite-plugin-pages/client" />
// Ambient module shim: lets TypeScript type-check imports of Vue
// single-file components (every `*.vue` import is a component options object).
declare module '*.vue' {
import type { ComponentOptions } from 'vue'
const Component: ComponentOptions
export default Component
}
// Same shim for markdown files — presumably compiled into Vue components
// by the markdown plugin added in this commit (unplugin-vue-markdown); verify against vite config.
declare module '*.md' {
import type { ComponentOptions } from 'vue'
const Component: ComponentOptions
export default Component
}

341
package-lock.json generated
View File

@ -8,10 +8,10 @@
"name": "rachel-blog-vue",
"version": "0.0.0",
"dependencies": {
"sass": "^1.77.8",
"vue": "^3.4.29"
},
"devDependencies": {
"@traptitech/markdown-it-katex": "^3.6.0",
"@tsconfig/node20": "^20.1.4",
"@types/node": "^20.14.5",
"@unhead/vue": "^1.9.16",
@ -19,9 +19,12 @@
"@vue/language-server": "^2.0.26",
"@vue/tsconfig": "^0.5.1",
"@vue/typescript-plugin": "^2.0.26",
"katex": "^0.16.11",
"npm-run-all2": "^6.2.0",
"sass": "^1.77.8",
"typescript": "~5.4.0",
"typescript-language-server": "^4.3.3",
"unplugin-vue-markdown": "^0.26.2",
"vite": "^5.3.1",
"vite-plugin-pages": "^0.32.3",
"vite-ssg": "^0.23.8",
@ -525,6 +528,34 @@
"@jridgewell/sourcemap-codec": "^1.4.14"
}
},
"node_modules/@mdit-vue/plugin-component": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@mdit-vue/plugin-component/-/plugin-component-2.1.3.tgz",
"integrity": "sha512-9AG17beCgpEw/4ldo/M6Y/1Rh4E1bqMmr/rCkWKmCAxy9tJz3lzY7HQJanyHMJufwsb3WL5Lp7Om/aPcQTZ9SA==",
"dev": true,
"dependencies": {
"@types/markdown-it": "^14.1.1",
"markdown-it": "^14.1.0"
}
},
"node_modules/@mdit-vue/plugin-frontmatter": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@mdit-vue/plugin-frontmatter/-/plugin-frontmatter-2.1.3.tgz",
"integrity": "sha512-KxsSCUVBEmn6sJcchSTiI5v9bWaoRxe68RBYRDGcSEY1GTnfQ5gQPMIsM48P4q1luLEIWurVGGrRu7u93//LDQ==",
"dev": true,
"dependencies": {
"@mdit-vue/types": "2.1.0",
"@types/markdown-it": "^14.1.1",
"gray-matter": "^4.0.3",
"markdown-it": "^14.1.0"
}
},
"node_modules/@mdit-vue/types": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@mdit-vue/types/-/types-2.1.0.tgz",
"integrity": "sha512-TMBB/BQWVvwtpBdWD75rkZx4ZphQ6MN0O4QB2Bc0oI5PC2uE57QerhNxdRZ7cvBHE2iY2C+BUNUziCfJbjIRRA==",
"dev": true
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
@ -560,6 +591,28 @@
"node": ">= 8"
}
},
"node_modules/@rollup/pluginutils": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.1.0.tgz",
"integrity": "sha512-XTIWOPPcpvyKI6L1NHo0lFlCyznUEyPmPY1mc3KpPVDYulHSTvyeLNVW00QTLIAFNhR3kYnJTQHeGqU4M3n09g==",
"dev": true,
"dependencies": {
"@types/estree": "^1.0.0",
"estree-walker": "^2.0.2",
"picomatch": "^2.3.1"
},
"engines": {
"node": ">=14.0.0"
},
"peerDependencies": {
"rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0"
},
"peerDependenciesMeta": {
"rollup": {
"optional": true
}
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.18.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.1.tgz",
@ -768,6 +821,15 @@
"win32"
]
},
"node_modules/@traptitech/markdown-it-katex": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/@traptitech/markdown-it-katex/-/markdown-it-katex-3.6.0.tgz",
"integrity": "sha512-CnJzTWxsgLGXFdSrWRaGz7GZ1kUUi8g3E9HzJmeveX1YwVJavrKYqysktfHZQsujdnRqV5O7g8FPKEA/aeTkOQ==",
"dev": true,
"dependencies": {
"katex": "^0.16.0"
}
},
"node_modules/@tsconfig/node20": {
"version": "20.1.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node20/-/node20-20.1.4.tgz",
@ -789,6 +851,28 @@
"integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
"dev": true
},
"node_modules/@types/linkify-it": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz",
"integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==",
"dev": true
},
"node_modules/@types/markdown-it": {
"version": "14.1.1",
"resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.1.tgz",
"integrity": "sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==",
"dev": true,
"dependencies": {
"@types/linkify-it": "^5",
"@types/mdurl": "^2"
}
},
"node_modules/@types/mdurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz",
"integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==",
"dev": true
},
"node_modules/@types/ms": {
"version": "0.7.34",
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz",
@ -1199,6 +1283,7 @@
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
"integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
"dev": true,
"dependencies": {
"normalize-path": "^3.0.0",
"picomatch": "^2.0.4"
@ -1207,6 +1292,15 @@
"node": ">= 8"
}
},
"node_modules/argparse": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
"dev": true,
"dependencies": {
"sprintf-js": "~1.0.2"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@ -1223,6 +1317,7 @@
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
"integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
"dev": true,
"engines": {
"node": ">=8"
},
@ -1243,6 +1338,7 @@
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"dependencies": {
"fill-range": "^7.1.1"
},
@ -1298,6 +1394,7 @@
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
"integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
"dev": true,
"dependencies": {
"anymatch": "~3.1.2",
"braces": "~3.0.2",
@ -1649,6 +1746,18 @@
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz",
"integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w=="
},
"node_modules/extend-shallow": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz",
"integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==",
"dev": true,
"dependencies": {
"is-extendable": "^0.1.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/extract-comments": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/extract-comments/-/extract-comments-1.1.0.tgz",
@ -1691,6 +1800,7 @@
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
},
@ -1730,6 +1840,7 @@
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"optional": true,
"os": [
@ -1780,6 +1891,7 @@
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
"dependencies": {
"is-glob": "^4.0.1"
},
@ -1805,6 +1917,21 @@
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"dev": true
},
"node_modules/gray-matter": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz",
"integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==",
"dev": true,
"dependencies": {
"js-yaml": "^3.13.1",
"kind-of": "^6.0.2",
"section-matter": "^1.0.0",
"strip-bom-string": "^1.0.0"
},
"engines": {
"node": ">=6.0"
}
},
"node_modules/has-property-descriptors": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
@ -1966,12 +2093,14 @@
"node_modules/immutable": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/immutable/-/immutable-4.3.6.tgz",
"integrity": "sha512-Ju0+lEMyzMVZarkTn/gqRpdqd5dOPaz1mCZ0SH3JV6iFw81PldE/PEB1hWVEA288HPt4WXW8O7AWxB10M+03QQ=="
"integrity": "sha512-Ju0+lEMyzMVZarkTn/gqRpdqd5dOPaz1mCZ0SH3JV6iFw81PldE/PEB1hWVEA288HPt4WXW8O7AWxB10M+03QQ==",
"dev": true
},
"node_modules/is-binary-path": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
"integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
"dev": true,
"dependencies": {
"binary-extensions": "^2.0.0"
},
@ -1989,10 +2118,20 @@
"object-assign": "^4.1.1"
}
},
"node_modules/is-extendable": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz",
"integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-extglob": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@ -2010,6 +2149,7 @@
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
"dev": true,
"dependencies": {
"is-extglob": "^2.1.1"
},
@ -2021,6 +2161,7 @@
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true,
"engines": {
"node": ">=0.12.0"
}
@ -2053,6 +2194,19 @@
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
"dev": true
},
"node_modules/js-yaml": {
"version": "3.14.1",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
"dev": true,
"dependencies": {
"argparse": "^1.0.7",
"esprima": "^4.0.0"
},
"bin": {
"js-yaml": "bin/js-yaml.js"
}
},
"node_modules/jsdom": {
"version": "24.1.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.0.tgz",
@ -2132,12 +2286,55 @@
"graceful-fs": "^4.1.6"
}
},
"node_modules/katex": {
"version": "0.16.11",
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.11.tgz",
"integrity": "sha512-RQrI8rlHY92OLf3rho/Ts8i/XvjgguEjOkO1BEXcU3N8BqPpSzBNwV/G0Ukr+P/l3ivvJUE/Fa/CwbS6HesGNQ==",
"dev": true,
"funding": [
"https://opencollective.com/katex",
"https://github.com/sponsors/katex"
],
"dependencies": {
"commander": "^8.3.0"
},
"bin": {
"katex": "cli.js"
}
},
"node_modules/katex/node_modules/commander": {
"version": "8.3.0",
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
"dev": true,
"engines": {
"node": ">= 12"
}
},
"node_modules/kind-of": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
"integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/kolorist": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/kolorist/-/kolorist-1.8.0.tgz",
"integrity": "sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==",
"dev": true
},
"node_modules/linkify-it": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz",
"integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==",
"dev": true,
"dependencies": {
"uc.micro": "^2.0.0"
}
},
"node_modules/local-pkg": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/local-pkg/-/local-pkg-0.5.0.tgz",
@ -2171,6 +2368,35 @@
"@jridgewell/sourcemap-codec": "^1.4.15"
}
},
"node_modules/markdown-it": {
"version": "14.1.0",
"resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz",
"integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==",
"dev": true,
"dependencies": {
"argparse": "^2.0.1",
"entities": "^4.4.0",
"linkify-it": "^5.0.0",
"mdurl": "^2.0.0",
"punycode.js": "^2.3.1",
"uc.micro": "^2.1.0"
},
"bin": {
"markdown-it": "bin/markdown-it.mjs"
}
},
"node_modules/markdown-it/node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"dev": true
},
"node_modules/mdurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz",
"integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==",
"dev": true
},
"node_modules/memorystream": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz",
@ -2305,6 +2531,7 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
"integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
@ -2429,6 +2656,7 @@
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
"dev": true,
"engines": {
"node": ">=8.6"
},
@ -2543,6 +2771,15 @@
"node": ">=6"
}
},
"node_modules/punycode.js": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz",
"integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==",
"dev": true,
"engines": {
"node": ">=6"
}
},
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
@ -2586,6 +2823,7 @@
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
"integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
"dev": true,
"dependencies": {
"picomatch": "^2.2.1"
},
@ -2707,6 +2945,7 @@
"version": "1.77.8",
"resolved": "https://registry.npmjs.org/sass/-/sass-1.77.8.tgz",
"integrity": "sha512-4UHg6prsrycW20fqLGPShtEvo/WyHRVRHwOP4DzkUrObWoWI05QBSfzU71TVB7PFaL104TwNaHpjlWXAZbQiNQ==",
"dev": true,
"dependencies": {
"chokidar": ">=3.0.0 <4.0.0",
"immutable": "^4.0.0",
@ -2731,6 +2970,19 @@
"node": ">=v12.22.7"
}
},
"node_modules/section-matter": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz",
"integrity": "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==",
"dev": true,
"dependencies": {
"extend-shallow": "^2.0.1",
"kind-of": "^6.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/semver": {
"version": "7.6.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
@ -2817,6 +3069,12 @@
"source-map": "^0.6.0"
}
},
"node_modules/sprintf-js": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
"dev": true
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
@ -2843,6 +3101,15 @@
"node": ">=8"
}
},
"node_modules/strip-bom-string": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz",
"integrity": "sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/symbol-tree": {
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
@ -2889,6 +3156,7 @@
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
"dev": true,
"dependencies": {
"is-number": "^7.0.0"
},
@ -2978,6 +3246,12 @@
"node": ">=18"
}
},
"node_modules/uc.micro": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz",
"integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==",
"dev": true
},
"node_modules/ufo": {
"version": "1.5.3",
"resolved": "https://registry.npmjs.org/ufo/-/ufo-1.5.3.tgz",
@ -3014,6 +3288,54 @@
"node": ">= 10.0.0"
}
},
"node_modules/unplugin": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.11.0.tgz",
"integrity": "sha512-3r7VWZ/webh0SGgJScpWl2/MRCZK5d3ZYFcNaeci/GQ7Teop7zf0Nl2pUuz7G21BwPd9pcUPOC5KmJ2L3WgC5g==",
"dev": true,
"dependencies": {
"acorn": "^8.11.3",
"chokidar": "^3.6.0",
"webpack-sources": "^3.2.3",
"webpack-virtual-modules": "^0.6.1"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/unplugin-vue-markdown": {
"version": "0.26.2",
"resolved": "https://registry.npmjs.org/unplugin-vue-markdown/-/unplugin-vue-markdown-0.26.2.tgz",
"integrity": "sha512-FjmhLZ+RRx7PFmfBCTwNUZLAj0Y9z0y/j79rTgYuXH9u+K6tZBFB+GpFFBm+4yMQ0la3MNCl7KHbaSvfna2bEA==",
"dev": true,
"dependencies": {
"@mdit-vue/plugin-component": "^2.1.2",
"@mdit-vue/plugin-frontmatter": "^2.1.2",
"@mdit-vue/types": "^2.1.0",
"@rollup/pluginutils": "^5.1.0",
"@types/markdown-it": "^14.0.1",
"markdown-it": "^14.1.0",
"unplugin": "^1.10.1"
},
"funding": {
"url": "https://github.com/sponsors/antfu"
},
"peerDependencies": {
"vite": "^2.0.0 || ^3.0.0-0 || ^4.0.0-0 || ^5.0.0-0"
}
},
"node_modules/unplugin/node_modules/acorn": {
"version": "8.12.1",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz",
"integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==",
"dev": true,
"bin": {
"acorn": "bin/acorn"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
@ -3484,6 +3806,21 @@
"node": ">=12"
}
},
"node_modules/webpack-sources": {
"version": "3.2.3",
"resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz",
"integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==",
"dev": true,
"engines": {
"node": ">=10.13.0"
}
},
"node_modules/webpack-virtual-modules": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.2.tgz",
"integrity": "sha512-66/V2i5hQanC51vBQKPH4aI8NMAcBW59FVBs+rC7eGHupMyfn34q7rZIE+ETlJ+XTevqfUhVVBgSUNSW2flEUQ==",
"dev": true
},
"node_modules/whatwg-encoding": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",

View File

@ -11,10 +11,10 @@
"type-check": "vue-tsc --build --force"
},
"dependencies": {
"sass": "^1.77.8",
"vue": "^3.4.29"
},
"devDependencies": {
"@traptitech/markdown-it-katex": "^3.6.0",
"@tsconfig/node20": "^20.1.4",
"@types/node": "^20.14.5",
"@unhead/vue": "^1.9.16",
@ -22,9 +22,12 @@
"@vue/language-server": "^2.0.26",
"@vue/tsconfig": "^0.5.1",
"@vue/typescript-plugin": "^2.0.26",
"katex": "^0.16.11",
"npm-run-all2": "^6.2.0",
"sass": "^1.77.8",
"typescript": "~5.4.0",
"typescript-language-server": "^4.3.3",
"unplugin-vue-markdown": "^0.26.2",
"vite": "^5.3.1",
"vite-plugin-pages": "^0.32.3",
"vite-ssg": "^0.23.8",

View File

@ -1,3 +0,0 @@
<template>
mjau mjau EXAMPLE
</template>

22
src/components/Link.vue Normal file
View File

@ -0,0 +1,22 @@
<script setup lang="ts">
// Link to a post: receives the post's title plus both a router path and a
// plain URL so it can render with or without client-side routing.
const props = defineProps<{
to: {
title : string,
url : string,
path : string
}
}>()
</script>
<template>
<!-- On the client, use a RouterLink for SPA navigation; during SSG /
     before hydration, fall back to a plain anchor so the link still works. -->
<ClientOnly>
<RouterLink :to="props.to.path">
{{ props.to.title }}
</RouterLink>
<template #placeholder>
<a :href="props.to.url">
{{ props.to.title }}
</a>
</template>
</ClientOnly>
</template>

View File

@ -1,6 +1,3 @@
<script setup lang="ts">
</script>
<template>
<div class="sitetitle">
<div class="titlecontainer">

View File

@ -0,0 +1,22 @@
<script setup lang="ts">
// Layout wrapper for a blog post: renders the slotted post body, then a
// metadata footer with the original publication date.
// NOTE(review): `title` is accepted but not rendered here — presumably used
// by a parent or for head metadata; confirm before removing.
const props = defineProps<{
date: Date
title: string
}>()
</script>
<template>
<article>
<slot>
post content goes here
</slot>
</article>
<hr>
<ul>
<li>Originally authored: {{ props.date.toLocaleDateString("en-US") }}</li>
<!-- Fix: the list was closed with a second opening <ul> tag, producing
     mis-nested HTML; close it properly. -->
</ul>
<hr>
</template>
<style scoped>
</style>

View File

@ -1,8 +1,14 @@
<script setup lang="ts">
import posts from '../posts'
import Link from '../components/Link.vue'
</script>
<template>
mjau mjau posts here
<ul>
<li v-for="post in posts">
<Link :to="post"/>
</li>
</ul>
</template>
<style scoped>

View File

@ -1,14 +1,17 @@
import './main.scss'
import 'katex/dist/katex.min.css'
import { ViteSSG } from 'vite-ssg'
import routes from '~pages'
import type { ComponentOptions } from 'vue'
import App from './App.vue'
import Ex from './components/Ex.vue'
import posts from './posts'
const allRoutes = routes.concat([
{ path: '/blog/mjau', component: Ex },
])
const allRoutes = routes.concat(posts.map((post : ComponentOptions) => ({
path: post.path,
component: post.component
})))
export const createApp = ViteSSG(
App,

View File

@ -0,0 +1,135 @@
---
title: "A favorite proof of mine"
date: "2023-03-06"
---
There are a lot of proofs in mathematics. Many of them serve to verify what we intuitively know to be true, some of them shed light on new methods, and some reveal new ways to view old ideas. Some proofs leave us with a headache, some leave us bored, and some leave us with wonder. In this blog post I will share a beautiful proof leading to a closed formula for the $n$-th Fibonacci number, taking us on a detour into functional programming and linear algebra.
<!--more-->
# The Fibonacci Numbers
The Fibonacci numbers are a sequence of numbers starting with a zero followed by a one where each consequent number is the sum of the last two: $0, 1, 1, 2, 3, 5, 8, 13 \dots$. If we wanted to be more precise we could define a mathematical sequence $\{f\}_{n=0}^{\infty}$ by the following recursive definition:
$$
f_n = \begin{cases}
0\;\textrm{if}\;n=0 \\
1\;\textrm{if}\;n=1 \\
f_{n-2} + f_{n-1}\;\textrm{otherwise}
\end{cases}
$$
The Fibonacci numbers have become a bit of a poster child for recursive definitions, perhaps due to their simplicity. You'll surely find them in the early chapters of most books teaching functional programming (a programming paradigm where recursive definitions are common).
Indeed, if we open [Chapter 5: Recursion](https://learnyouahaskell.github.io/recursion.html) of [LYAH](https://learnyouahaskell.github.io/) we are greeted with the following.
> Definitions in mathematics are often given recursively. For instance, the Fibonacci sequence is defined recursively.
Likewise, in [Chapter 1.2.2 Tree Recursion](https://mitp-content-server.mit.edu/books/content/sectbyfn/books_pres_0/6515/sicp.zip/full-text/book/book-Z-H-4.html#%_toc_%_sec_1.2.2) of [SICP](https://mitp-content-server.mit.edu/books/content/sectbyfn/books_pres_0/6515/sicp.zip/index.html) we are yet again greeted by a familiar face.
> Another common pattern of computation is called tree recursion. As an example, consider computing the sequence of Fibonacci numbers
With this in mind, it might come as a surprise that there is a closed, non-recursive, formula for the $n$-th Fibonacci number. Perhaps more surprising is that we will discover this formula by using the ideas presented in the above chapter of SICP.
# Programmatically calculating the $n$-th Fibonacci number
A naive way of calculating the $n$-th Fibonacci number is to use the definition above. Check if $n = 0$, if $n = 1$, and otherwise calculating $f_{n-2}$ and $f_{n-1}$. This corresponds to the following Haskell code:
```hs
fib :: Integer -> Integer
fib 0 = 0
fib 1 = 1
fib n = fib (n-2) + fib (n-1)
```
However, there is an issue with this method, many Fibonacci numbers will be calculated numerous times, as for each Fibonacci number evaluated we split into two paths, evaluating the previous and twice previous Fibonacci number. The reader which prefers visuals might appreciate Figure 1.5 from the SICP chapter.
How might we fix this then? A human calculating the $n$-th Fibonacci number might construct a list of Fibonacci numbers, calculating each Fibonacci number only once. While it is possible to do this on the computer it is superfluous to carry around all previous numbers, as we only need the previous two to calculate the next one. We might think of this as a 2-slot window, moving along the Fibonacci numbers, taking $n$ steps to arrive at $f_n$. In code we could represent this as follows:
```hs
-- steps -> f n-2 -> f n-1 -> f n
window :: Integer -> Integer -> Integer -> Integer
window 0 a b = a
window steps a b = window (steps-1) b (a+b)
fib :: Integer -> Integer
fib n = window n 0 1
```
In each step we move the window by replacing the first slot in the window by what was previously in the second slot and filling the new second slot of the window with the sum of the previous two slots. This is then repeated $n$ times, and then the first slot of the window is returned.
# Mathematically calculating the $n$-th Fibonacci number
What does this have to do with mathematics, and this beautiful proof that I have promised? We shall begin to translate this moving window into the language of mathematics, our window is a pair of numbers, so why not represent it as a vector. Furthermore, we may view sliding our window one step as a function $S$ from vectors to vectors. This poses an interesting question: is this function a linear transformation?
$$
S \left(\begin{bmatrix} a \\ b \end{bmatrix}\right)
+ S\left(\begin{bmatrix} c \\ d \end{bmatrix}\right)
= \begin{bmatrix} b \\ a + b \end{bmatrix}
+ \begin{bmatrix} d \\ c + d \end{bmatrix}
$$
$$ = \begin{bmatrix} b + d \\ a + b + c + d \end{bmatrix} = $$
$$ S\left(\begin{bmatrix} a + c \\ b + d \end{bmatrix}\right) = S\left(\begin{bmatrix} a \\ b \end{bmatrix} + \begin{bmatrix} c \\ d \end{bmatrix}\right)$$
$$ S\left(\alpha \begin{bmatrix} a \\ b \end{bmatrix}\right) = \begin{bmatrix} \alpha b \\ \alpha (a + b) \end{bmatrix} = \alpha \begin{bmatrix} b \\ a + b \end{bmatrix} = \alpha S\left(\begin{bmatrix} a \\ b \end{bmatrix}\right) $$
It is! This is great news as it means we can represent our step function by a matrix. With some basic linear algebra one can deduce that
$$ S = \begin{bmatrix} 0 & 1 \\ 1 & 1 \end{bmatrix} $$
Then to calculate the $n$-th Fibonacci number we take the starting window $\begin{bmatrix} 0 \\ 1 \end{bmatrix}$ and multiply it by $S^n$. Now that the sliding window has been expressed purely in the language of linear algebra we may apply the tools of linear algebra.
# Applying the tools of linear algebra
If you're familiar with linear algebra there might be a part of your brain yelling "diagonalization" right now. We've translated our problem into linear algebra, but even for a small matrix calculating $S^n$ can become costly for high $n$. Diagonalization is a technique in which we express a matrix in a base where all base vectors are eigenvectors of the original matrix. The benefit of doing this is that it turns exponentiation of matrices, which is hard to calculate into exponentiation of scalars, which is much easier to calculate.
An eigenvector for our matrix $S$ is a vector $\hat v$ for which $S \hat v = \lambda \hat v$ for some scalar $\lambda$, which we call an eigenvalue. If there are any such vectors we can find them using their definition.
$$ S \hat v = \lambda \hat v = \lambda I_2 \hat v $$
Subtracting $\lambda I_2 v$ from both sides yields:
$$ 0 = S \hat v - \lambda I_2 \hat v = (S-\lambda I_2) \hat v $$
An equation of the form $0 = A \hat u$ will only have non-trivial solutions if the column vectors of $A$ are linearly dependent, that is if $\textrm{det}(A) = 0$. Thus we can find all scalars $\lambda$ for which there are non-trivial vector solutions by solving $\textrm{det}(S-\lambda I_2) = 0$. Because of this property the polynomial $\textrm{det}(A-\lambda I)$ is called the characteristic polynomial of $A$.
In our case we have the following:
$$ \textrm{det}(S-\lambda I_2) = \begin{vmatrix} - \lambda & 1 \\ 1 & 1 - \lambda \end{vmatrix} = \lambda^2 - \lambda - 1 = 0$$
Solving for $\lambda$ yields two eigenvalues:
$$ \lambda_0 = \frac{1 - \sqrt 5}{2} ,\; \lambda_1 = \frac{1 + \sqrt 5}{2}$$
Would you look at that, $\frac{1 + \sqrt 5}{2}$, the golden ratio! Some of you might already know that the golden ratio is connected to the Fibonacci numbers, in fact, as you get further and further into the sequence of the Fibonacci numbers the ratio $\frac{f_{n+1}}{f_n}$ approaches $\frac{1 + \sqrt 5}{2}$.
Now we can solve $(S-\lambda I_2) \hat v = 0$ for $\lambda_0$ and $\lambda_1$, and if the two resulting vectors are linearly independent we may use them as the basis of our diagonalization matrix. Gauss elimination yields:
$$ \hat v_0 = \begin{bmatrix} -2 \\ \sqrt 5 - 1 \end{bmatrix},\; \hat v_1 = \begin{bmatrix} 2 \\ \sqrt 5 + 1 \end{bmatrix} $$
These vectors are indeed linearly independent, and we can use them as basis vectors for our diagonal matrix. We will now write $S = BDB^{-1}$ where
$ B = \begin{bmatrix} -2 & 2 \\ \sqrt 5 - 1 & \sqrt 5 + 1 \end{bmatrix}$
$ D = \begin{bmatrix} \frac{1 - \sqrt 5}{2} & 0 \\ 0 & \frac{1 + \sqrt 5}{2} \end{bmatrix}$
We then have that
$$S^n = (BDB^{-1})^n = \underbrace{BDB^{-1} BDB^{-1} \dots BDB^{-1}}_n $$
$$ = \underbrace{BDIDI \dots DB^{-1}}_n = BD^nB^{-1}$$
Which is very nice since
$$D^n = \begin{bmatrix} \frac{1 - \sqrt 5}{2} & 0 \\ 0 & \frac{1 + \sqrt 5}{2} \end{bmatrix}^n = \begin{bmatrix} \left(\frac{1 - \sqrt 5}{2}\right)^n & 0 \\ 0 & \left(\frac{1 + \sqrt 5}{2}\right)^n \end{bmatrix}$$
After calculating $B^{-1}$ we can solve $\begin{bmatrix} f_n \\ f_{n+1} \end{bmatrix} = BD^nB^{-1}\begin{bmatrix} 0 \\ 1 \end{bmatrix}$ for $f_n$ to get our closed expression.
$$ f_n = \frac{1}{\sqrt 5} \left(\left(\frac{1 + \sqrt 5}{2}\right)^n - \left(\frac{1 - \sqrt 5}{2}\right)^n\right) $$
# Final thoughts
When I first happened upon the closed formula for the $n$-th Fibonacci number it seemed so shockingly random: a formula with a bunch of square roots always giving me a recursively specified integer. After I learned this proof it doesn't feel as random anymore; instead, I feel it would be more surprising if we carried out the whole diagonalization process and ended up with no roots. Perhaps more importantly, it opened my eyes to the usage of linear algebra as a powerful mathematical tool, and not just something to be applied in geometry, flow balancing or computer graphics.
# Addendum
It was pointed out to me [on mastodon](https://mastodon.vierkantor.com/@Vierkantor/109978590835441958) that this technique is of interest even if it is not possible to diagonalize the stepping matrix. This is because using fast binary exponentiation one can perform matrix exponentiation in logarithmic time. Thus any linearly recursive algorithm can be calculated in logarithmic time!
Fast binary exponentiation uses the identity $A^{2k} = (A \cdot A)^k$, thus we can split the exponent in 2 when even, rather than performing $2k$ multiplications. Recursively doing this each time the exponent is even yields logarithmic time exponentiation.

View File

@ -0,0 +1,150 @@
---
title: "Haskell and Logic"
date: 2022-12-15
---
Haskell programs are proofs, don't you know? Hidden inside the types of Haskell lies a deep connection to mathematical logic. Given some basic Haskell knowledge and mathematical interest, you too may experience the sense of wonder I first did when discovering the Curry-Howard isomorphism.
<!--more-->
## What is logic?
Before we can talk about logic in Haskell we have to talk about propositional logic. Propositional logic is a set of rules telling us how to construct propositions, and how to derive conclusions from true ones.
First, we shall get a feel for what a proposition is by breaking down some examples. Consider the following statements: "It is sunny outside", "The bank is open", "It is either sunny or rainy outside", and "If it is rainy then the bank is not open". The first two statements are "atomic", they cannot be broken down into smaller propositions, unlike the latter two statements which are built up from smaller propositions. Let us then specify the forms a proposition may have.
* The atomic propositions, which we shall call $a, b, c \dots$
* Truth and falsehood, written as $\top$ (truth) and $\bot$ (falsehood)
* The connectives, given two propositions $P, Q$ lets us talk about
* $P$ and $Q$ are true, written as $P \land Q$
* $P$ or $Q$ (or both) are true, written as $P \lor Q$
* Implication, given two propositions $P, Q$ lets us talk about
* if $P$ is true then $Q$ is, written as $P \to Q$
* Negation, which given a proposition $P$ lets us talk about
* $P$ is false, written as $\lnot P$
Combining these we can create larger propositions such as $a \to a \lor b$ or $a \lor (b \land \lnot c)$. We can now translate English non-atomic propositions into the language of propositional logic. "If it is rainy then the bank is not open" becomes $\textrm{it is rainy} \to \lnot (\textrm{bank is open})$.
Given the knowledge that a proposition $P$ is true, we may make conclusions about the propositions used to construct $P$.
* If $P \land Q$ is true, then both $P$ and $Q$ are true
* If $P \lor Q$ is true, then at least one of $P$ or $Q$ is true
* If $P \to Q$ is true, then $Q$ must be true whenever $P$ is
* If $\lnot P$ is true, then $P$ is false
To prove things one more concept needs to be introduced, that of tautologies and contradictions. A tautology is a statement that is true no matter if the atoms inside of it are true or not, and conversely, a contradiction is a statement that is always false. An example of a tautology is $a \to a \lor b$, if $a$ is true then $a \lor b$ is true since $a$ is, if $a$ is false then we need not concern ourselves with the consequent of the implication. An important tautology following the same reasoning is $\bot \to P$. An example of a contradiction is $a \land \lnot a$, this proposition states that $a$ is both true and false, which is impossible.
In logic we are usually interested in proving that a proposition is either a tautology or a contradiction, proving or disproving its truth. One usually does this by following the rules of the logic, repeatedly applying them to yield a proof. However there is another way to do logic, through Haskell!
## Interpreting logic in Haskell
How does one translate from propositional logic to Haskell? We shall interpret types as propositions, and terms (elements) of a type as witnesses or proofs that the proposition (the type they inhabit) is true. The typechecking of Haskell will then double as a proof checker. This might seem abstract at first, but once you see how the building blocks of propositions translate into types it will become clear.
Atomic propositions ($a,b,c,\dots$) are interpreted as type variables `a`, `b`, `c`, etc. You might notice that there is no way to prove `a`, since one would have to create a proof `p :: a`, that is, a term inhabiting every type. This corresponds to how one cannot prove or disprove atomic propositions in propositional logic.
$\top$ may be interpreted as any type of which we know a term, that is, any proposition of which we know a proof. For the sake of readability we shall define a truth type, with a single constructor bearing witness to its truth.
```hs
data Truth = TruthWitness
```
Translating $\bot$ might seem more tricky, but it turns out Haskell lets us define empty types without constructors, that is, a proposition with no proof. We thus define the following.
```hs
data Falsehood
```
The connectives might seem more tricky, but turn out to be fairly straightforward as well. A proof of $P \land Q$ is simply a proof of $P$ and a proof of $Q$, but wait, that's a pair! $P \land Q$ is thus interpreted as `(P, Q)`. Likewise, a proof of $P \lor Q$ is *either* a proof of $P$ or a proof of $Q$, that's right, it's the `Either` type! $P \lor Q$ is interpreted as `Either P Q`.
The implication $P \to Q$ tells us that if we have a proof of $P$ then we have a proof of $Q$. So what we need is a way of turning terms of type $P$ into terms of type $Q$. That is, a function `P -> Q`, would you look at that, it's even the same arrow!
Perhaps surprisingly the trickiest part of the translation is negation. For $\lnot P$ to be true $P$ must be false, that is, there may not be any terms `p :: P` bearing witness to the truth of P. How can we express this within Haskell? The trick is to redefine $\lnot P$ as $P \to \bot$, that is, $\bot$ is true if $P$ is. But since we *know* that $\bot$ is false, this means $P$ cannot be true. Thus we interpret $\lnot P$ as `P -> Falsehood`. To aid readability we will make the definition below.
```hs
type Not p = p -> Falsehood
```
Now that we've introduced our language, let us speak! We shall prove the above-mentioned tautology $a \to a \lor b$ by constructing a function `a -> Either a b`. If you've used the `Either` type before then the proof is trivial.
```hs
proof1 :: a -> Either a b
proof1 = Left
```
Now let us prove that $a \land \lnot a$ is a contradiction, we shall do this by proving its negation $\lnot (a \land \lnot a)$. In doing so we need to construct a term of type `Not (a, Not a)`, that is, `(a, a -> Falsehood) -> Falsehood`. Thus, we must construct a function to `Falsehood` which has no inhabitants, however, we are given a function `a -> Falsehood` which we may use to get a term of the empty `Falsehood` type by applying the supplied value of type `a` to it.
```hs
proof2 :: Not (a, Not a) -- (a, a -> Falsehood) -> Falsehood)
proof2 (a, f) = f a
```
I strongly encourage you to play around with this on your own, coming up with some propositions and proving them by hand and using Haskell. The table below gives an overview of the translation from logic to Haskell.
<table>
<tr> <th>Logic</th> <th>Haskell</th> </tr>
<tr> <td>Proposition</td> <td>Type</td> </tr>
<tr> <td>Proof</td> <td>Term (element of type)</td> </tr>
<tr> <td>Atom</td> <td>Type variable</td> </tr>
<tr> <td>$\top$</td> <td>Type with a known term</td> </tr>
<tr> <td>$\bot$</td> <td>Type with no terms</td> </tr>
<tr> <td>$P \land Q$</td> <td><code>(P, Q)</code></td> </tr>
<tr> <td>$P \lor Q$</td> <td><code>Either P Q</code></td> </tr>
<tr> <td>$P \to Q$</td> <td><code>P -> Q</code></td> </tr>
<tr> <td>$\lnot P$</td> <td><code>P -> EmptyType</code></td> </tr>
</table>
## The paradoxes of Haskell
I admit, we did use a little too much effort when constructing `proof2`, there is a more succinct version, which type checks all the same.
```hs
proof2 :: Not (a, Not a) -- (a, a -> Falsehood) -> Falsehood)
proof2 = undefined
```
And here's another version!
```hs
proof2 :: Not (a, Not a) -- (a, a -> Falsehood) -> Falsehood)
proof2 = proof2
```
While both of these typecheck, we understand that they are not valid proofs, in fact, we can prove `Falsehood` in this exact way.
```hs
proof3 :: Falsehood
proof3 = proof3
```
This is not good, maybe programming languages don't correspond to logic after all? The solution is to "run" our proofs to check them, proofs defined this way will either crash or loop forever, so once we see that our proof terminates then we know the proof to be correct.
The programming languages studied by logicians guarantee termination and don't include values like `undefined` or `error` from Haskell (in fact, these values, called [bottom](https://wiki.haskell.org/Bottom), exist to model non-terminating programs). These programming languages, while they can check proofs entirely during typechecking, come with their limitations. Since there is no way to decide if an arbitrary program will terminate ([the halting problem](https://en.wikipedia.org/wiki/Halting_problem)) these programming languages necessarily place restrictions upon what programs are allowed in order to guarantee termination. Consequently, these languages are not [Turing-complete](https://en.wikipedia.org/wiki/Turing_completeness), meaning there are programs one cannot write in the language.
## Excluding the middle
The logic which we do in Haskell has a name, it's called constructive logic. The name is due to the fact that one must "construct" a proof, such as a function, in order to prove a theorem. Every theorem has an explicit construction that shows us how the proof is performed. This requirement, as it turns out, is quite strict, so strict that we can no longer prove all the things we could in classical logic. In particular, we will be unable to prove both $\lnot \lnot P \to P$ and $P \lor \lnot P$. Proving these classically is trivial, just consider the case where $P$ is true, and the case where $P$ is false, in both cases these propositions hold. In Haskell however, it's trickier, think about it, how could one write these programs?
```hs
impossible1 :: Not (Not a) -> a -- ((a -> Falsehood) -> Falsehood) -> a
impossible2 :: Either a (Not a) -- Either a (a -> Falsehood)
```
These two propositions have names, $P \lor \lnot P$ is called the law of excluded middle, and $\lnot \lnot P \to P$ is called double negation. Though it might not be obvious, double negation follows from excluded middle. By assuming excluded middle, we can even use Haskell to prove this.
```hs
assume_excluded_middle :: Either a (Not a)
assume_excluded_middle = error "Proof depends on excluded middle"
-- Since there are no values of Falsehood this is a perfectly
-- fine constructive definition. One cannot call this function
-- without already using an error or infinite loop.
-- Proof-oriented programming languages will let you construct
-- a function of this type without using `error`.
if_pigs_fly :: Falsehood -> a
if_pigs_fly f = error "cannot happen"
double_negation :: Not (Not a) -> a
double_negation notnota = case assume_excluded_middle of
Left a -> a
Right nota -> if_pigs_fly (notnota nota)
```
In Haskell our propositions are types, which carry proofs around. We cannot simply assume that any proposition must be true or false, because in doing so one would have to either produce a proof that it is true, or a proof that it is false. This is the very reason we are not able to prove excluded middle, as well as the reason that double negation follows from excluded middle. Excluded middle is the very rule telling us that we may assume every proposition is either true or false, excluding the possibility of a middle value.
Logicians have given a name to logics in which one cannot prove excluded middle, these logics are called intuitionistic. Initially it might be confusing why one would wish to work in a logic where not every proposition may be assumed to be either true or false, such a fact seems obvious. Likewise, mathematicians have historically been [at each other arguing over the validity of excluded middle](https://en.wikipedia.org/wiki/Brouwer%E2%80%93Hilbert_controversy#Validity_of_the_law_of_excluded_middle). Though intuitionistic logic might seem counter-productive there are today multitudes of reasons to work without excluded middle. A philosophical reason is provided by [Gödel's incompleteness theorem](https://en.wikipedia.org/wiki/G%C3%B6del%27s_incompleteness_theorems#Completeness), which states that in all sufficiently powerful systems of logic there will be statements that are neither provable nor disprovable. In the presence of this knowledge, it might be unsettling that excluded middle lets one assume these statements are either true or false. There are also practical reasons to work in intuitionistic logic. There are a lot of objects in mathematics that interpret intuitionistic logic (such as Haskell), meaning results in intuitionistic logic apply to more of mathematics. Generality is the true strength of intuitionism. An intuitionistic proof holds in a classical setting, but a classical proof may not be interpreted intuitionistically.
## Wrapping up
If you've made it this far, then congratulations! Even though I've tried to make this blog post introductory it's by no means trivial. Hopefully you are now feeling the same wonder as I did when stumbling into this topic. I'd like to end by broadly writing about where one might head next.
This blog post only covers propositional logic, but one can interpret predicate logic and even higher-order logic within programming languages. This is done with dependent type theory. In dependent type theories, types may themselves *depend* upon values, letting one create types such as `is-even n` which depend on a natural number `n`. This type would have terms which are witnesses that `n` is even. These programming languages, or proof assistants as they are usually called, enable you to prove properties of both programs and mathematical objects. In doing so they provide a way to automatically check mathematical proofs for correctness. There are already many mathematicians using these tools to create formalized proofs of mathematical theorems.
If you are interested in learning more about dependent type theory, then you might be interested in downloading and playing around with a proof assistant. I recommend [agda](https://agda.readthedocs.io/en/latest/getting-started/installation.html) to those familiar with Haskell; a lengthy [list of agda tutorials](https://agda.readthedocs.io/en/latest/getting-started/tutorial-list.html) is included in its documentation. If you wish to learn more about how doing math with types rather than sets might lead to new insights and connections to topology, then I encourage you to learn about homotopy type theory, for which there are some great resources: [HoTTEST Summer School 2022 lectures](https://www.youtube.com/playlist?list=PLtIZ5qxwSNnzpNqfXzJjlHI9yCAzRzKtx), [HoTTEST Summer School 2022 GitHub](https://github.com/martinescardo/HoTTEST-Summer-School), [Introduction to Homotopy Type Theory](https://arxiv.org/abs/2212.11082), [1lab](https://1lab.dev/1Lab.intro.html), [HoTT Book](https://homotopytypetheory.org/book/).

31
src/posts/index.ts Normal file
View File

@ -0,0 +1,31 @@
// @ts-nocheck -- modules loaded via import.meta.glob are untyped: the
// frontmatter fields (title, date) are injected by unplugin-vue-markdown
// and are not visible to the compiler.
import type { ComponentOptions } from 'vue'

/** Metadata and compiled component for a single markdown blog post. */
export type Post = {
  title : string,               // frontmatter title
  date : Date,                  // frontmatter date, parsed into a Date
  slug : string,                // file base name, e.g. "secd"
  path : string,                // route path, e.g. "/posts/secd"
  url : string,                 // pre-rendered page URL, e.g. "/posts/secd.html"
  component : ComponentOptions, // the compiled markdown component
}

// Eagerly resolve every markdown post in this directory.
// import.meta.glob returns lazy loader functions; invoke each one and
// await them all in parallel.
const imports = await Promise.all(
  Object.values(import.meta.glob('./*.md')).map((m) => m())
)

const posts : Post[] = imports.map((post) => {
  // NOTE(review): __name is a Vue-internal component property carrying the
  // source file's base name — confirm it is stable across Vue upgrades.
  const slug = post.default.__name
  const path = "/posts/" + slug
  const url = path + ".html"
  return {
    title: post.title,
    date: new Date(post.date),
    slug, path, url,
    component: post.default
  }
})

// Newest post first.
posts.sort((a, b) => b.date.getTime() - a.date.getTime())

export default posts

View File

@ -0,0 +1,111 @@
---
title: "Running Linux on the Thinkpad X13 Yoga (Gen 2)"
date: "2021-10-09"
---
This is a rather short blog post on how to get the Thinkpad X13 Yoga (Gen 2) working properly on Linux. This includes making audio work, removing screen tears, making the screen auto rotate, and making the desktop a comfortable size. <!--more-->I'm writing this because there isn't that much info out there on this system, and hopefully this prevents duplicated efforts.
# Stylus and Touch
Work out of the box! No, seriously! If it doesn't work for you try finding out if your distribution has a Wacom driver package and install that.
# Audio
Out of the box Alsa did not detect any devices whatsoever. Though in the output of lspci it showed that the sound card had Intel snd drivers loaded. The solution to this was to simply download the sof-firmware package, on Ubuntu you need to install firmware-sof-signed.
# Video stutters/lockups/cursor lag
The video worked well for me mostly, though occasionally the cursor would lag behind, creating ghost copies of itself or terminals would freeze up for a second. I never had any issues with hardware acceleration though. This is solved by adding `i915.enable_psr=0` to the kernel commandline. If you're using GRUB (if you don't know what this means then you are) you need to append this inside the quotes of the line `GRUB_CMDLINE_DEFAULT` in the file `/etc/default/grub`. After saving your changes run `update-grub`. If you're not using grub or running a distro without `update-grub` I'm going to assume you know how to do this.
# HiDPI
I've got the 2K version of the X13, because it was cheaper (the more decked out one was for sale). Needless to say that that many pixels on a 13" screen can cause some visibility issues.
To solve this you just need to adjust the DPI setting in the display options.
If you're not using a desktop environment this is fixed by adding the following line to your ~/.Xresources.
```
Xft.dpi: 144
```
If you're using xinit/startx you'll also want to add this to your xinitrc (if it's not already there).
```sh
xrdb -merge "$HOME/.Xresources"
```
# Making the screen auto rotate
Note that you might not need this as some desktop environments handle this automatically.
Long story short, many other people have written bad, half-working scripts. I've hacked these together and written a less bad, working script.
```sh
#!/bin/sh -e
# Auto rotate screen based on device orientation
# Might be necessary depending on your environment
export DISPLAY=:0
# put your display name here
DNAME=eDP1
# may change grep to match your touchscreen(s)
INDEVS="$(xinput | awk '/Wacom/ { split($8,a,"="); print a[2] }')"
rotate () {
ORIENTATION=$1
NEW_ROT="normal"
CTM="1 0 0 0 1 0 0 0 1"
# Set the actions to be taken for each possible orientation
case "$ORIENTATION" in
normal)
NEW_ROT="normal"
CTM="1 0 0 0 1 0 0 0 1"
;;
bottom-up)
NEW_ROT="inverted"
CTM="-1 0 1 0 -1 1 0 0 1"
;;
right-up)
NEW_ROT="right"
CTM="0 1 0 -1 0 1 0 0 1"
;;
left-up)
NEW_ROT="left"
CTM="0 -1 1 1 0 0 0 0 1"
;;
esac
xrandr --output "$DNAME" --rotate "$NEW_ROT"
for INDEV in $INDEVS; do
xinput set-prop "$INDEV" 'Coordinate Transformation Matrix' $CTM
done
}
monitor-sensor | while read -r line; do
case "$line" in
*orientation*)
set -- $line
rotate "$4"
;;
esac
done
```
To use this make sure to install iio-sensor-proxy, and then edit this line
```sh
DNAME=eDP1
```
to the name listed as connected by the output of the `xrandr` command.
Then mark the file as executable and add it to your autostart.
If your wallpaper looks weird after rotating you can edit the script, adding a line which re-sets your wallpaper after this line.
```sh
xrandr --output "$DNAME" --rotate "$NEW_ROT"
```

216
src/posts/secd.md Normal file
View File

@ -0,0 +1,216 @@
---
title: "A look at SECD machines"
date: "2021-12-10"
---
The SECD machine is a virtual machine designed to evaluate lambda expressions. Its purpose is to be a more natural goal for compilers than assembly while maintaining reasonable execution speed. In this blog post I will give you a quick intro to SECD machines and an overview of a simple implementation.
<!--more-->
## The SECD Machine
SECD stands for Stack Environment Control Dump, all of which but the environment are stacks in the SECD machine. The machine operates by reading instructions from the control stack which operate on itself and the other stacks. A lambda is implemented as its body coupled with its environment. Application is done with the "apply" instruction which pops a lambda off the stack and adds the next element of the stack to the lambda's environment, binding the variable of the lambda. The previous stacks are then pushed onto the dump stack. When the lambda has been fully reduced the return instruction is used to save the top of the stack, the result of the reduction, and restores the stacks from the dump stack.
## The Modern SECD Machine
This approach, while sound, suffers from some performance issues. Thankfully, there are a number of improvements which can be made. The first of which is the use of De Brujin indices. In De Brujin index notation each variable is a number indexing which lambda it was bound at (counting outwards). Here are some examples:
$$\lambda x.x \xRightarrow[]{\text{De Brujin}} \lambda \#0$$
$$\lambda f.\lambda g.\lambda x. f(gx) \xRightarrow[]{\text{De Brujin}} \lambda\lambda\lambda \#2(\#1\#0)$$
The benefit of using De Brujin notation is that, rather than a map, a dynamic array may be used for the environment, with variables being indices into said array. An additional benefit is that the machine does not need to concern itself with alpha equivalence. As a side-note, counting conventionally starts at $\#1$, not $\#0$, however in a virtual machine counting from $\#0$ is, of course, much more natural.
The second improvement to be made is to "dump" the dump stack. Rather than using the dump stack the current state will be pushed onto the stack as a closure which the return instruction can then restore when exiting a lambda.
The third improvement is to introduce a new, static "stack". The rationale for this is that many programs will include top level definitions which are in scope throughout the program. Including these definitions in the environment whenever a closure is created is a waste of both memory and processing power.
The fourth improvement is allowing multi-variable lambdas. In the case of the SECD machine using a multi-variable lambda rather than multiple single variable lambdas corresponds to not nesting closures, which would lead to redundant copies of the same environment being made. Note that this does not sacrifice currying, as will become clear in later sections.
## An implementation of a simple SECD machine
In this section a SECD machine implemented in Haskell will be introduced bit by bit. This machine is by no means an efficient nor powerful implementation, it serves only to demonstrate the core concepts of the SECD machine.
### Data structures
The machine will only be able to recognise two types of values. Integers and Closures.
```hs
data Val
= I Int
| Clo Int [Val] [Inst] -- number of arguments, environment, instructions
deriving (Eq, Show)
```
The machine comes with the following instruction set.
```hs
data Inst
= Const Int
| Global Int
| Local Int
| Closure Int Int -- number of args, number of instructions
| App
| Ret
| Dup
| Add
deriving (Eq, Show)
```
### The instructions
* Const pushes a constant onto the stack.
* Global pushes a global variable onto the stack.
* Local pushes a local variable onto the stack.
* Closure creates a closure taking a set number of arguments and consuming a set amount of instructions from the control stack. The resulting closure is pushed onto the stack.
* App pops a closure and an argument from the stack. If the closure takes more than one argument the argument is pushed onto the closure's environment and the closure is pushed back onto the stack. If the closure takes only one argument the closure will, rather than being pushed onto the stack, replace the current environment, such that the closure's instructions are placed in the control stack, and its environment placed in the environment stack. A closure is then formed from the old control and environment stacks, which is pushed onto the stack.
* Ret pops a closure in the second index of the stack and installs it as the current control and environment stacks. The element at the top of the stack remains untouched, yielding the result of an application.
* Dup duplicates the element at the top of the stack. It's only included as a matter of convenience.
* Add pops the top two elements off the stack, adds them, and pushes the result back onto the stack.
#### Instruction Table
All stacks grow from right to left, that is, the left most element is at the top of the stack.
{% katexmm %}
<table>
<tr>
<th align=center colspan="3">Before</th><th align=center colspan="3">After</th>
</tr>
<tr>
<th>Control</th><th>Env</th><th>Stack</th><th>Control</th><th>Env</th><th>Stack</th>
</tr>
<tr>
<td>Const i : c</td><td>e</td><td>s</td><td>c</td><td>e</td><td>i : s</td>
</tr>
<tr>
<td>Global i : c</td><td>e</td><td>s</td><td>c</td><td>e</td><td>Globals[i] : s</td>
</tr>
<tr>
<td>Local i : c</td><td>e</td><td>s</td><td>c </td><td> e</td><td>e[i] : s</td>
</tr>
<tr>
<td>Closure n a : $c_1$ ... $c_n$ : c</td><td>e</td><td>s</td><td>c</td><td>e</td><td>Closure a {e} [$c_1$ ... $c_n$] : s</td>
</tr>
<tr>
<td>App : c</td><td>e</td><td>Closure {e'} [c'] : v : s</td><td>c'</td><td>v : e'</td><td>Closure$^0$ {e} [c] : s</td>
</tr>
<tr>
<td>App : c</td><td>e</td><td>Closure$^n$ {e'} [c'] : v : s</td><td>c</td><td>e</td><td>Closure$^{n - 1}$ {v : e'} [c'] : s</td>
</tr>
<tr>
<td>Ret : c </td><td>e</td><td>v : Closure$^0$ \{e'\} [c'] : s </td><td> c'</td><td>e' </td><td> v : s</td>
</tr>
<tr>
<td>Dup : c</td><td>e</td><td>v : s</td><td>c</td><td>e</td><td>v : v : s</td>
</tr>
<tr>
<td>Add : c </td><td>e</td><td>v : s</td><td>c</td><td>e</td><td>v + v : s</td>
</tr>
</table>
{% endkatexmm %}
### An example
Consider the following lambda expression:
$$(\lambda f.\lambda g.\lambda x. f(gx)) (\lambda x. x + 1) (\lambda x. x + 1) 0$$
It composes a lambda which increments its input by one with an identical lambda and then applies 0 to the result. In order to run this on our SECD machine it must first be rewritten with De Brujin index notation.
$$(\lambda \lambda \lambda \#2(\#1\#0)) (\lambda 1 + \#1) (\lambda 1 + \#1)0$$
Now in order to create an instruction tape for the SECD machine it must be rewritten, using Closure as lambdas, Local $\#i$ for variables and keeping in mind to explicitly apply to and return from lambdas. Additionally it will have to be written in postfix form due to the nature of stack based machines. After performing these actions the expression has been transformed into the following tape:
$$Const 0 : Closure [ Const 1 : Local \#0 : Add : Ret ] : Closure [ Const 1 : Local \#0 : Add : Ret ] :$$
$$Closure^3 [ Local \#0 : Local \#1 : App : Local \#2 : App : Ret ] : App : App : App$$
Reading this (or one of the previous lambda expressions) backwards you should be able to convince yourself of the validity of this translation.
This can now directly be translated into a list of instructions for the Haskell machine implementation.
```hs
[ Const 0, Closure 1 4, Const 1, Local 0, Add, Ret, Dup
, Closure 3 6, Local 0, Local 1, App, Local 2, App, Ret
, App, App, App]
```
Which can then be evaluated in order to yield the normalized expression.
```hs
λ eval [] [ Const 0, Closure 1 4, Const 1, Local 0, Add, Ret, Dup
, Closure 3 6, Local 0, Local 1, App, Local 2, App, Ret
, App, App, App]
[I 2]
```
As expected, the expression reduces to 2.
## What's next?
While this is a functional implementation, it does have a few fatal flaws. First off, its usage of linked lists. While more natural in Haskell, a real SECD machine should of course use arrays to achieve constant variable lookup time. Second is the perhaps more obvious lack of features; to implement an interesting language one would need a more complete instruction set. Third is the representation of the tape. In a real-world SECD machine one would have a succinct binary representation, allowing for simple compact storage of programs and speedy execution.
## The Haskell source
The implementation itself is trivial and is little more than a Haskell translation of the instruction table above.
```hs
module SECD where
data Inst
= Const Int
| Global Int
| Local Int
| Closure Int Int -- args, scope
| App
| Ret
| Dup
| Add
deriving (Eq, Show)
data Val
= I Int
| Clo Int [Val] [Inst] -- args env code
deriving (Eq, Show)
vadd :: Val -> Val -> Val
vadd (I x1) (I x2) = I (x1 + x2)
vadd _ _ = error "attempted addition with closure"
data SEC = SEC [Val] [Val] [Inst] [Val] -- stack, environment, control, global
deriving Show
-- | Takes a global env, a list of instructions and returns and int
eval :: [Val] -> [Inst] -> [Val]
eval globals insts = go (SEC [] [] insts globals)
where
go :: SEC -> [Val]
go sec@(SEC stack env control global) = case control of
[] -> stack
(x:xs) -> case x of
Const i -> go (SEC (I i:stack) env xs global)
Global i -> go (SEC (global !! i:stack) env xs global)
Local i -> go (SEC (env !! i:stack) env xs global)
Closure a s -> go (SEC (Clo a env (take s xs):stack) env (drop s xs) global)
Ret -> let (v:(Clo 0 e c):st) = stack in go (SEC (v:st) e c global)
App -> case stack of
(Clo 1 e c:v:st) -> go (SEC (Clo 0 env xs:st) (v:e) c global)
(Clo n e c:v:st) -> go (SEC (Clo (n-1) (v:e) c:st) env xs global)
Dup -> let (v:st) = stack in go (SEC (v:v:st) env xs global)
Add -> let (x1:x2:st) = stack in go (SEC (vadd x1 x2:st) env xs global)
```
## Licensing Information
Feel free to use any of the code or concepts here in your own projects! The code can be considered free of copyright and all notions of liability and/or warranty.
## See Also
[Modern SECD Machine, University of Calgary](https://pages.cpsc.ucalgary.ca/~robin/class/521/lectures_lambda/secd.pdf) - Gave me the idea to scrap the D stack.
[SECD machine, Wikipedia](https://en.wikipedia.org/wiki/SECD_machine)

114
src/posts/why-be-pure.md Normal file
View File

@ -0,0 +1,114 @@
---
title: "Why be pure?"
date: "2022-12-10"
---
Newcomers to Haskell are often told that Haskell is "pure", or that it is "mathematical". It is often not clear what this means, or why these properties would be of interest to begin with. In this blog post I aim to combat this by showing why purity can be helpful, both in writing programs and improving performance.
<!--more-->
## What is purity?
Before we get into the why, we have to get into the what. Haskell is a purely functional language, but what does that mean? Purity tells us that functions in Haskell behave like mathematical functions. That is, functions in Haskell must always give equal output for equal input. In mathematics we'd hope that if we have a function $f$, and two values $x$ and $y$ such that $x = y$ then $f(x) = f(y)$ and the same is true for Haskell. This is the reason that you are not allowed to modify the value of variables in Haskell as you are in other programming languages such as Python. Consider the following Python program:
```py
n = 0
def f(x):
global n
n = n + x
return n
```
The function `f` will give a different result each time we call it with the same input.
```
>>> f(1)
1
>>> f(1)
2
>>> f(2)
4
>>> f(2)
6
```
But then $f(2)$ is not always equal to $f(2)$, so Python is not pure.
Now that we know what it means for Haskell to be pure, we can move on to the more interesting question. Why place these restrictions on our language? Requiring Haskell to be pure might seem arbitrary, or even counter-productive, but playing by the rules of mathematics will let us use the tools of mathematics!
## Equational Reasoning
One of the most powerful tools handed to us by mathematics is equational reasoning. It uses the fact that equality in Haskell behaves like mathematical equality, and in doing so it allows us to *prove* properties about Haskell programs in ways that are not possible in non-pure languages.
### A Gentle Example
To show what I mean, I will prove that `map (+1) [1,2,3] = [2,3,4]`. First, recall the definition of `map`:
```hs
map :: (a -> b) -> [a] -> [b]
map f [] = []
map f (x:xs) = f x : map f xs
```
Second, recall how lists are represented in Haskell. When we write `[1,2,3]`, this is shorthand for `1 : (2 : (3 : []))`. Now we can start using our equalities! The definition of `map` tells us that `map f (x : xs) = f x : map f xs`, thus `map (+1) (1 : 2 : 3 : []) = (+1) 1 : map (+1) (2 : 3 : [])` Continuing this reasoning yields us the following:
```hs
[map (+1) (1 : 2 : 3 : [])]
= (+1) 1 : [map (+1) (2 : 3 : [])]
= (+1) 1 : (+1) 2 : [map (+1) (3 : [])]
= (+1) 1 : (+1) 2 : (+1) 3 : [map (+1) ([])]
```
In each step the code within `[]` is replaced using the same equality rule. For the final step we look at the first case of map telling us that `map f [] = []`
```hs
= (+1) 1 : (+1) 2 : (+1) 3 : []
= 2 : 3 : 4 : []
= [2,3,4]
```
While this is mildly interesting, it's not very useful as we could have simply asked Haskell to evaluate the code for us in order to yield the same result.
### Proving an interesting property
A more exciting example is proving that `map f (map g xs) = map (f . g) xs` for all functions `f`, `g`, and lists `xs`. Recall the definition of `.`
```hs
(.) :: (b -> c) -> (a -> b) -> (a -> c)
(.) f g x = f (g x)
```
That is, `f . g` is a function that first applies `g` to an input and then applies `f` to the result of `g`. Thus we are proving that first mapping one function, and then another is the same as mapping the composition of said functions. All lists are in one of the forms `[]` or `x : xs`, thus it suffices to prove that the property holds for these two cases. If you are familiar with doing proofs by induction this is precisely what we will be doing.
First, consider the `[]` case:
```hs
map f (map g [])
= map f []
= []
= map (f . g) []
```
Secondly, consider the `x : xs` case, assuming that the property holds for `xs`:
```hs
map f (map g (x : xs))
= map f (g x : map g xs)
= f (g x) : map f (map g xs)
= f (g x) : map (f . g) xs [by our assumption]
= (f . g) x : map (f . g) xs
= map (f . g) (x : xs)
```
Now this is an interesting result! It tells us that any time we wish to map over two functions we may simply map over their composite instead. This is not only interesting but has performance implications as well, since it tells us we only need to use map once!
## Avoiding Duplication
In programming there are many cases when data has to be duplicated. If we assign a value to a variable `x` and then assign `y = x`, then we often don't want `y` to change when `x` does. In order to avoid this the data stored in `x` is duplicated and then stored in `y`. This duplication takes time and memory, so most programming languages don't do it by default, instead, they have the user specify when a duplication is needed. In practice when creating large programs or libraries which need to work without any assumptions, one often has to employ considerable amounts of data duplication.
How does this relate to Haskell and purity? Since Haskell is pure data can never be modified, and there is thus never a need for data duplication. It's fine to pass around references as their contents will never be modified. When structures are rebuilt the old contents are reused and only the parts that are being changed are recreated. This allows for more efficient use of both memory and processing time.
## The Takeaway
The purity of Haskell allows for mathematical reasoning about programs. This not only makes it possible to be more confident in the correctness of our programs but can be used in order to optimize code as well. In fact, the primary Haskell compiler, GHC, uses these guarantees in order to optimize programs. The restrictions imposed by purity turn into properties that programmers and compilers alike may use to their advantage.

View File

@ -0,0 +1,280 @@
---
title: "Writing Portable Makefiles"
date: "2021-01-22"
---
Makefiles are a great portable tool for compiling your programs which any POSIX compliant system will provide, however, a lot of Makefiles are GNU Makefiles, which are less portable. This blog post will not only explain how to write Makefiles, but how to emulate GNU make features.
<!--more-->
## An introduction to Makefiles
In a Makefile, you can specify variables or macros as they are called in the world of Makefiles. All environment variables will also be available as macros.
```make
half_name=hello
in_files=hello.o world.o
```
These can be referenced later by using `$(NAME)` or `${NAME}`.
```make
full_name=$(half_name)_world
```
You're also able to write comments by starting lines with a `#`.
```make
# This line is a comment which will be ignored
```
Targets are used to specify how files are made. A target is defined by the following syntax:
```make
filename: depends
action
action2
action3
```
This tells make that the file "filename" needs the file "depends" to be present to be made. To make "filename" it then runs the action lines in the system shell. Each line is run in a separate shell instance. Macros can be used in target names, depends, and actions. Much like in shell, macros which contain spaces will be split when evaluated, meaning a list of depends can be stored in a macro.
We can now use the macros we defined earlier to specify how to compile our hello world program:
```make
$(full_name): $(in_files)
$(CC) $(CFLAGS) -o $@ $(in_files)
```
Here I used some special macros, the `$(CC)` macro is used to let users specify their preferred C compiler in the `CC` environment variable, if it is not set it will default to `cc` or `c99`. This is preferable to hard coding `gcc`, `clang`, or whatever compiler you might use since not everyone will use the same tools as you. The `$(CFLAGS)` macro is added to let the user specify flags to send to the C compiler in their `CFLAGS` environment variable. Lastly, and maybe most importantly, the `$@` macro evaluates to the name of the current target, which in this case is `$(full_name)`, which in turn evaluates to `hello_world`. Ultimately this target will run the following shell:
```sh
cc -o hello_world hello.o world.o
```
But hold on, what about our hello.o and world.o files? Well, if they're already present make will happily use them, however, since we want to automate our build process we should specify how to build them too.
```make
hello.o: hello.c
$(CC) -c $(CFLAGS) -o $@ hello.c
world.o: world.c
$(CC) -c $(CFLAGS) -o $@ world.c
```
Now running make will run the following:
```sh
cc -c -o hello.o hello.c
cc -c -o world.o world.c
cc -o hello_world hello.o world.o
```
Now make knows how to make our full `hello_world` program, but why do we specify that `hello.o` depends on `hello.c`? We write the source ourselves, so surely there is no need to tell make this? Well, the beauty of make is that it checks the last edited date of depends to rebuild targets. In other words, if we edit hello.c and rerun make it will only run the following:
```sh
cc -c -o hello.o hello.c
cc -o hello_world hello.o world.o
```
This is because the `world.o` target is already up to date.
It's also worth mentioning that you can choose which target to make by running `make TARGET_NAME` and that by default the first defined target is the one run.
We now have a complete Makefile for our hello world project!
```make
# Hello world Makefile
half_name=hello
in_files=hello.o world.o
full_name=$(half_name)_world
# main target
$(full_name): $(in_files)
$(CC) $(CFLAGS) -o $@ $(in_files)
# depends
hello.o: hello.c
$(CC) -c $(CFLAGS) -o $@ hello.c
world.o: world.c
$(CC) -c $(CFLAGS) -o $@ world.c
```
Of course, this isn't a complete guide to writing Makefiles but it should give you enough of a grasp on it to understand the rest of this post.
## A powerful GNU Makefile
The following is a Makefile written by a now-graduated senior of mine, slightly modified by me to show more non-standard syntax. It is very powerful, and I used it as a template for writing my own Makefiles for a long time. However, it uses a lot of GNU extensions, making it a perfect example of how to replace them. I've added some comments throughout the file which explain the nonstandard syntax.
```make
# GNU make uses ?= to define a macro if not already set by an environment
# variable. It also allows spaces between keys = and values.
TARGET_EXEC ?= a.out
BUILD_DIR ?= ./build
SRC_DIRS ?= ./src
# GNU make uses := to assign variable immediately instead of when needed.
# GNU make adds the syntax $(shell command) to assign a macro the output
# of a shell command.
SRCS := $(shell find $(SRC_DIRS) -name *.cpp -or -name *.c -or -name *.s)
# GNU make allows for string substitution in variables. Here each word in
# SRCS is represented by %, which is replaced by $(BUILD_DIR)/%.o
# meaning we get a full list of object files from out list of source files.
OBJS := $(SRCS:%=$(BUILD_DIR)/%.o)
INC_DIRS := $(shell find $(SRC_DIRS) -type d)
# GNU make adds the syntax $(addprefix prefix,words) which is used to
# add a prefix to the beginning of each word in words.
INC_FLAGS := $(addprefix -I,$(INC_DIRS))
CPPFLAGS ?= $(INC_FLAGS) -MMD -MP -pg -ggdb -std=c99 -pedantic -O2
LDFLAGS = -lm -lSDL2 -lSDL2_gfx -lSDL2_image -lSDL2_ttf -pg
CC = gcc
CXX = g++
# GNU make evaluates $^ to the full list of depends for the current target
$(BUILD_DIR)/$(TARGET_EXEC): $(OBJS)
$(CC) $^ -o $@ $(LDFLAGS)
# GNU make allows for generic targets matching patterns, here all files
# in $(BUILD_DIR) with the extension .s.o are targeted.
# assembly source
$(BUILD_DIR)/%.s.o: %.s
# GNU make adds the syntax $(dir file) used for getting the name
# of the directory a file resides in.
$(MKDIR_P) $(dir $@)
# GNU make adds the $< macro to all targets. This macro evaluates
# to the first dependency. In standard make this macro is only
# defined for inference rules (more on that later).
$(AS) $(ASFLAGS) -c $< -o $@
# c source
$(BUILD_DIR)/%.c.o: %.c
$(MKDIR_P) $(dir $@)
$(CC) $(CPPFLAGS) $(CFLAGS) -c $< -o $@
# c++ source
$(BUILD_DIR)/%.cpp.o: %.cpp
$(MKDIR_P) $(dir $@)
$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $< -o $@
# The special target .PHONY is standard and tells make that one or more
# targets do not correspond to a file. If this was not here make would
# not run the clean target if a file named clean existed.
.PHONY: clean profile stacktrace
clean:
$(RM) -r $(BUILD_DIR)
# In GNU make adding macro definitions after a target name defines these
# macros for that particular target.
profile: CPPFLAGS += -pg
profile: LDFLAGS += -pg
profile: $(BUILD_DIR)/$(TARGET_EXEC)
stacktrace: CPPFLAGS += -v -da -Q
stacktrace: $(BUILD_DIR)/$(TARGET_EXEC)
```
This Makefile will compile or assemble all C, C++, and assembly source in the source directory and then link it, making it easy to reuse since there is no need to specify individual files.
### Special targets
Before we start translating this to standard make I'm going to explain two special targets. There are many special targets, and `.PHONY` is one of them. For a full list of special targets and their purpose refer to the POSIX make definition. The two we're interested in here are `.POSIX` and `.SUFFIXES`. `.POSIX` is a target that should be defined before anything else in a standard Makefile, it tells make to not use any extensions which might collide with the standard. The `.SUFFIXES` is used to specify file extensions (suffixes) which make should recognize.
### Generic targets and suffix rules
The generic rules that GNU make provides can be very useful. Thankfully, we can recreate them without any non-standard extensions. Let's look at a generic GNU make target:
```make
%.o: %.c
$(CC) -c $(CFLAGS) -o $@ $<
```
In standard make we can use the `.SUFFIXES` target to ensure make knows of our file extensions.
```make
.SUFFIXES: .o .c
.c.o:
$(CC) -c $(CFLAGS) -o $@ $<
```
Unfortunately it is not possible to specify another output directory like in the GNU make example, however, this is rarely necessary.
### Internal macros
In the GNU make example we had this target:
```make
$(BUILD_DIR)/$(TARGET_EXEC): $(OBJS)
$(CC) $^ -o $@ $(LDFLAGS)
```
This uses the non-standard `$^` macro, instead of this we could reuse `$(OBJS)`.
```make
$(BUILD_DIR)/$(TARGET_EXEC): $(OBJS)
$(CC) $(OBJS) -o $@ $(LDFLAGS)
```
If the depends are not already a macro you'd have to type them out manually. In general, I try to keep dependencies in macros to make this process easy.
### Shell and substitution macros
Standard make cannot replicate these features by itself, however, using a shell script and include line this behavior can be replicated.
For example, to emulate the following behavior:
```make
BUILD_DIR ?= ./build
SRC_DIRS ?= ./src
SRCS := $(shell find $(SRC_DIRS) -name *.cpp -or -name *.c -or -name *.s)
OBJS := $(SRCS:%=$(BUILD_DIR)/%.o)
INC_DIRS := $(shell find $(SRC_DIRS) -type d)
INC_FLAGS := $(addprefix -I,$(INC_DIRS))
```
We could put this in our Makefile:
```make
include config.mk
```
and create a script containing this:
```sh
#!/bin/sh
SRC_DIR="${SRC_DIRS:-./src}"
SRCS="$(find "$SRC_DIR" -name '*.cpp' -or -name '*.c' -or -name '*.s')"
OBJS="$(for SRC in $SRCS; do printf '%s ' "$SRC.o"; done; printf '\n')"
INC_DIRS="$(find "$SRC_DIR" -type d)"
INC_FLAGS="$(for DIR in $INC_DIRS; do printf '%s ' "-I$DIR"; done; printf '\n')"
cat > config.mk <<-EOF
SRC_DIR=$SRC_DIR
SRCS=$SRCS
OBJS=$OBJS
INC_DIRS=$INC_DIRS
INC_FLAGS=$INC_FLAGS
EOF
```
Then, after creating new files one would rerun the script to recreate this configuration. This does require some more manual work, however, it also removes the need for make to run these shell commands on each invocation.
## Afterwords
I hope that after reading this, you, just like me, have realized that writing standard Makefiles is easy and that they can be just as powerful as GNU Makefiles.
## See also
[POSIX make definition](https://pubs.opengroup.org/onlinepubs/9699919799/utilities/make.html)
[A tutorial on portable Makefiles by Chris Wellons](https://nullprogram.com/blog/2017/08/20/)

View File

@ -1,16 +1,31 @@
import { fileURLToPath, URL } from 'node:url'
import vue from '@vitejs/plugin-vue'
import Vue from '@vitejs/plugin-vue'
import Pages from 'vite-plugin-pages'
import Markdown from 'unplugin-vue-markdown/vite'
import mk from '@traptitech/markdown-it-katex'
export default {
plugins: [
vue(),
Pages()
Vue({
include: [/\.vue$/, /\.md$/],
}),
Pages(),
Markdown({
headEnabled: true,
markdownItSetup(md) {
md.use(mk)
}
})
],
resolve: {
alias: {
'@': fileURLToPath(new URL('./src', import.meta.url))
}
},
esbuild: {
supported: {
'top-level-await': true
},
}
}