first commit
commit 35fb560b60

@@ -0,0 +1,5 @@
dist-newstyle
.cache
_site
*.local
*~

@@ -0,0 +1,6 @@
packages:
  ./
  ../achille

package site
  optimization: True

@@ -0,0 +1,281 @@
:root {
  --width: 740px;
  --lighter: #fff;
  --light: #fff;
  --mild: #d8dee9;
  --dark: #4c566a;
  --darker: #434c5e;
  --black: #3b4252;
  --blacker: #2e3440;
  --yellow: #f8c325;
  --red: #ee4242;
  --pink: #de59a8;
}

body {
  font: 15px Inter, sans-serif;
  line-height: 1.54;
  margin: 0;
  height: 100vh;
  color: var(--dark);
  background: var(--light);
  display: flex;
  flex-direction: column;
  hyphens: auto;
  font-weight: 400;
  counter-reset: section;
}

main span.draft {
  display: inline-block;
  text-transform: uppercase;
  background: var(--mild);
  font-size: .8rem;
  padding: .2em .5em;
  border-radius: 3px;
  font-weight: 300;
  vertical-align: middle;
}

h1, h2, h3 { color: var(--darker) }

main > section {
  counter-reset: subsection;
}

main ::selection {
  background: var(--yellow);
  color: var(--black);
}

a.footnote-ref sup::before { content: '[' }
a.footnote-ref sup::after { content: ']' }

.post h2::before {
  counter-increment: section;
  padding: 0 .5em 0 0;
  content: counter(section);
  opacity: .3;
}

.post section h3::before {
  counter-increment: subsection;
  padding: 0 .5em 0 0;
  content: counter(section) "." counter(subsection);
  opacity: .3;
}

a {
  color: var(--red);
  text-decoration: none
}
a:hover {text-decoration: underline}

hr {
  border: none;
  height: 2px;
  background: var(--mild);
  margin: 2.5em 0;
}

details summary {
  cursor: pointer;
  padding: .5em 1em;
  border-radius: 3px;
}

strong {color: var(--darker)}

#hd {background: var(--dark) }

#hd > section {
  max-width: var(--width);
  margin: 0 auto;
  padding: 0 1em;
  box-sizing: border-box;
  display: flex;
  line-height: 40px;
  width: 100%;
}

#hd section > a {
  margin: 0 1em 0 0;
  display: flex;
  align-items: center;
}

#hd nav a {
  margin: 0 0 0 1.2rem;
  color: var(--light);
  text-decoration: none;
  text-transform: uppercase;
  font-weight: 500;
}

#hd nav a:hover {border-bottom: 2px solid var(--yellow)}

main, #ft {
  padding: 2em 1em;
  max-width: var(--width);
  box-sizing: border-box;
  width: 100%;
  margin: 0 auto;
}

p > span.display {
  display: block;
  overflow-x: auto;
}

main {flex-grow: 1 }

h1, h2, h3, h4 { font-weight: 500 }
strong {font-weight: 600}

main header h1 {margin: 0}
main header p {margin: 0}
main header {margin: 0 0 2em}

main ul li { padding: 0 0 0 0 }
main ul { padding: 0 0 0 1.5em }

main ul.projects {
  padding: 0;
  margin: 2.5em 0 0;
  background: #fafafb;
  list-style: none;
  border-radius: 2px;
}

main ul.projects > li > a {
  display: flex;
  color: inherit;
  padding: .5em 2em 1em;
  transition: .2s background
}

main ul.projects > li img {
  width: 40px;
  margin: .5em 1.5em 0 0;
}

header.project { display: flex }
header.project img {
  width: 40px;
  margin: 1em 2em 0 0;
}
header.project p {font-style: italic}
header.project ul {padding: 0}
header.project ul li {
  font-size: .9em;
  display: inline-block;
  padding: .3em .5em;
  font-family: monospace;
  background: #eceff4;
  border-radius: 3px;
}
header.project ul li + li {margin: 0 0 0 1em;}

main ul.projects > li a:hover { background: #f2f4f7; text-decoration: none }
main ul.projects > li h2 { margin: 0; font-size: 1.4em; color: var(--black) }
main ul.projects > li p { margin: 0 }
main ul.projects li ul { padding: .5em 0 }
main ul.projects li ul li {
  display: inline-block;
  font-family: monospace;
  font-size: 13px;
  background: #fff;
  border-radius: 3px;
  list-style: none;
  line-height: 2em;
  padding: 0 .5em;
}
main ul.projects li ul li + li { margin: 0 0 0 1em }

main blockquote {
  font: 16px serif;
  font-style: italic;
}

main pre { padding: 0 0 0 1em }
main h2 { font-size: 1.8em; margin: 1em 0 .5em }
main h3 { font-size: 1.5em; margin: 1em 0 .5em }

#citations {margin: 2em 0 0}
dl {display: grid; gap: 1em; grid-template-columns: auto 1fr}
dt {text-align: right; font-weight: 500;}
.citation-label::before {content: '['}
.citation-label::after {content: ']:'}
dd {margin: 0}
dd p {margin: 0}

code {font: .9em "Source Code Pro", monospace}

code, pre.sourceCode {
  background: #eceff4;
  border-radius: 3px;
}

code {
  display: inline-block;
  padding: 0 .3em;
}

pre > code {display: block}

#ft svg {
  margin-left: .6em;
  vertical-align: middle;
  cursor: pointer;
  opacity: .7;
}
#ft svg:hover {opacity: 1}

pre.sourceCode {
  margin: 1em 0;
  padding: .8em 0;
  line-height: 1;
  overflow: auto;
}

pre.sourceCode > code {
  display: inline-block;
  margin: 0 1em;
}

figure {
  margin: 2em 0;
  text-align: center;
}

figure img {
  max-width: 100%;
  vertical-align: top;
}

.al {color: #f00; font-weight: bold; }
.an {color: #60a0b0; font-weight: bold; font-style: italic; }
.at {color: #7d9029}
.bn {color: #40a070}
.cf {color: #007020; font-weight: bold; }
.ch {color: #4070a0}
.cn {color: #880000}
.co {color: #60a0b0; font-style: italic; }
.cv {color: #60a0b0; font-weight: bold; font-style: italic; }
.do {color: #ba2121; font-style: italic; }
.dt {color: #902000}
.dv {color: #40a070}
.er {color: #f00; font-weight: bold; }
.fl {color: #40a070; }
.fu {color: #06287e; }
.in {color: #60a0b0; font-weight: bold; font-style: italic; }
.kw {color: #007020; }
.op {color: #666}
.ot {color: #007020}
.pp {color: #bc7a00}
.sc {color: #4070a0}
.ss {color: #bb6688}
.st {color: #4070a0}
.va {color: #19177c}
.vs {color: #4070a0}
.wa {color: #60a0b0; font-weight: bold; font-style: italic; }

@@ -0,0 +1 @@
Oh god why

@@ -0,0 +1,39 @@
---
title: A first entry
---

This is the first entry of this blog.
I've been meaning to make one for the longest time but never got around to
actually writing stuff down. After bike-shedding endlessly about which static-site
generator would be best for the task at hand, and trying to build my own in
Haskell with no specification in mind, I finally settled on a minimal setup to
get this out of the way. Yes, it is *very* limited. But it does work for now, is
not bloated, and is easy to customize.

Every page is generated with `pandoc`_, and a tiny Makefile takes care of
dependency tracking and incremental builds. I also use `jq`_ to transform pandoc
metadata. Ultimately I would prefer something leaner than pandoc, but
this is for later.

Till next time.

----

**2020/06/05:**
Of course I was going to ditch my janky setup at some point.
It happened sooner than expected, but I'm trying to convince myself that the first
version allowed me to identify exactly what I need from my static site
generator. I am now using a slightly modified version of `Hakyll`_.
Here's to hoping I'll manage to craft the perfect workflow™ from it.

----

**2020/06/12:**
Hm, the move to `Hakyll`_ wasn't for long. I was fed up with how complicated
it is, while still being very limiting. So I started to work on a new project
called **achille**. You can find the `WIP documentation here </projects/achille/>`_.
My site is already built with it and so far it's really neat!

.. _pandoc: https://github.com/jgm/pandoc
.. _jq: https://stedolan.github.io/jq/
.. _Hakyll: https://jaspervdj.be/hakyll/

@@ -0,0 +1,502 @@
---
title: Univalent Parametricity in Agda
draft: yes
---

.. highlight:: agda
.. default-role:: math

Assume you are proving properties on some object A. Moreover, assume that A is
known to be equivalent to some other object B. You ought to be able to apply the
properties you proved on A to B directly, as easily as you would in mathematics.
Likewise, any operation on A (think **computation**) ought to be "transformed"
into an "equivalent" operation on B automatically, for a good enough definition
of "equivalent". As it turns out, what ought to be simply isn't in proof
assistants. The proofs you write are tied to the representation you carry them
out on.

In [Tabareau2019]_, the authors show how both univalence and parametricity are not quite
enough to give us what we are looking for. But they demonstrate that with
**univalent parametricity** you are able to get what you ought to have.
After failing to make a good enough implementation of it in Coq during an internship,
here is me trying to make one in Cubical Agda.

This post follows the literate programming tradition, using Agda 2.6.1.

.. [Tabareau2019] The Marriage of Univalence and Parametricity, 2019
   (Nicolas Tabareau, Eric Tanter, Matthieu Sozeau)

Setting things up
~~~~~~~~~~~~~~~~~

.. raw:: html

   <details>
   <summary>Imports</summary>

::

  {-# OPTIONS --cubical --without-K --safe #-}

  open import Agda.Primitive
  open import Agda.Builtin.Sigma

  open import Data.List.Base
  open import Cubical.Data.Sigma.Properties

  open import Cubical.Foundations.Function using (_∘_; const)
  open import Cubical.Foundations.Prelude hiding (Type)
  open import Cubical.Foundations.Isomorphism
  open import Cubical.Foundations.Equiv
  open import Cubical.Foundations.HLevels
  open import Cubical.Foundations.Equiv.HalfAdjoint using (congEquiv)
  open import Cubical.Foundations.Equiv.Properties
  open import Cubical.Foundations.Univalence

  variable i j : Level

  infixr 1 _$_
  _$_ : {A : Set i} {P : A → Set j} (f : ∀ x → P x) (x : A) → P x
  f $ x = f x

  id : {A : Set i} → A → A
  id x = x

  _×_ : (A B : Set i) → Set i
  A × B = Σ A (const B)

.. raw:: html

   </details>


We begin by specifying the type of binary relations, since those are what we
are going to manipulate from now on.

::

  _∼_ : (A B : Set i) → Set (lsuc i)
  _∼_ {i} A B = A → B → Set i

  ∼-inv : {A B : Set i} → A ∼ B → B ∼ A
  ∼-inv R = λ b a → R a b

  ∼-comp : {A B C : Set i} → A ∼ B → B ∼ C → A ∼ C
  ∼-comp RAB RBC = λ a c → Σ _ λ b → RAB a b × RBC b c

Then we introduce our **type-indexed univalent logical relation**.

.. math:: {·⋈·} : {(A : \textsf{Set}_i) (B : \textsf{Set}_i) → \textsf{Rel}\ A\ B}

We use the notation `x≈y⟨A ⋈ B⟩` to denote `(x , y) ∈ (A ⋈ B)`. The shorthand `x≈y`
will appear when the relation is obvious.

::

  record _⋈_ (A B : Set i) : Set (lsuc i) where
    constructor UR
    field ur : A ∼ B
  open _⋈_

  ⋈-refl : (A : Set i) → A ⋈ A
  ⋈-refl _ = UR _≡_

  ⋈-sym : {A B : Set i} → A ⋈ B → B ⋈ A
  ⋈-sym RAB = UR (∼-inv (ur RAB))

  ⋈-trans : {A B C : Set i} → A ⋈ B → B ⋈ C → A ⋈ C
  ⋈-trans {B = B} RAB RBC = UR (∼-comp (ur RAB) (ur RBC))

We might be tempted to define an infix operator :code:`_≈_` to relate terms of type A and B,
provided the logical relation exists at A and B. However, after experimenting a bit with such an idea,
I found the instance search mechanism of Agda to be too limited. More on that later.
This also means we will not get the nice syntax of the initial Coq implementation for free.
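
For reference, a sketch of the kind of operator I had in mind (not type-checked,
and not used in the rest of this post):

.. code-block:: agda

   -- relate two terms, provided a logical relation between their types
   -- is available as an instance
   _≈_ : {A B : Set i} ⦃ R : A ⋈ B ⦄ → A → B → Set i
   _≈_ ⦃ R ⦄ x y = ur R x y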

We want :code:`⋈` to satisfy some constraints as it cannot be any relation::

  isFun : {A B : Set i} (R : A ∼ B) → Set i
  isFun {A = A} {B} R = (a : A) → isContr (Σ[ b ∈ B ] R a b)

  record isBifun {A B : Set i} (R : A ∼ B) : Set i where
    constructor _,_
    field
      fun    : isFun R
      funInv : isFun (∼-inv R)
  open isBifun

  funR   : {A B : Set i} {R : A ∼ B} (F : isFun R) → A → B
  center : {A B : Set i} {R : A ∼ B} (F : isFun R) → (x : A) → R x (funR F x)
  contr₁ : {A B : Set i} {R : A ∼ B} (F : isFun R) → ∀ {x} {y} → R x y → funR F x ≡ y
  contr₂ : {A B : Set i} {R : A ∼ B} (F : isFun R) → ∀ {x} {y} (r : R x y)
           → PathP (λ i → R x (contr₁ F r i)) (center F x) r
  contr₂ f {x} {y} r = λ i → {! snd (snd (f x) (y , r) i) !}

  isFunIsProp    : {A B : Set i} (R : A ∼ B) → isProp (isFun R)
  isFunInvIsProp : {A B : Set i} (R : A ∼ B) → isProp (isFun (∼-inv R))

  isBifun⇒isEquiv : {A B : Set i} {R : A ∼ B} (F : isBifun R) → isEquiv (funR (fun F))
  isBifun⇒≃ : {A B : Set i} {R : A ∼ B} (F : isBifun R) → A ≃ B

  isFunRf : {A B : Set i} (f : A → B) → isFun (λ a b → f a ≡ b)
  isFunRf f = λ a → (f a , refl)
                  , λ where (b , e) → ΣPathP (e , (λ i₁ i₂ → e (i₁ ∧ i₂)))

  isFunRfInvisBifun : {A B : Set i} (f : A → B) →
                      isFun (λ b a → f a ≡ b) → isBifun (λ a b → f a ≡ b)
  isFunRfInvisBifun f fun .fun    = isFunRf f
  isFunRfInvisBifun f fun .funInv = fun

  isFun-≡ : (A : Set i) → isFun (_≡_ {A = A})
  isFun-≡ A = isFunRf id

  isFun-equiv : {A B : Set i} {R R′ : A ∼ B} →
                (∀ x y → R x y ≃ R′ x y) → isFun R → isFun R′
  isFun-equiv e f a =
      ( funR f a , fst (e a (funR f a)) (center f a))
      , λ where (b , a≈b) → let z = contr₁ f (invEq (e a b) a≈b)
                                v = contr₂ f (invEq (e a b) a≈b) in
                            ΣPathP (z , {!!})

  isFun-comp : {A B C : Set i} {RAB : A ∼ B} {RBC : B ∼ C} →
               isFun RAB → isFun RBC → isFun (∼-comp RAB RBC)

  isBifun-≡ : (A : Set i) → isBifun (_≡_ {A = A})
  isBifun-≡ A = isFun-≡ A
              , isFun-equiv (λ x y → sym , record { equiv-proof = λ y≡x → (sym y≡x , refl)
                                                  , λ y₁ i₁ → {!!} }) (isFun-≡ A)

  isBifun-inv : {A B : Set i} {R : A ∼ B} → isBifun R → isBifun (∼-inv R)

  isBifun-comp : {A B C : Set i} {RAB : A ∼ B} {RBC : B ∼ C} →
                 isBifun RAB → isBifun RBC → isBifun (∼-comp RAB RBC)

  isBifun-comp FAB FBC = {!!}

..
  ::

    funR f = fst ∘ fst ∘ f
    center f = snd ∘ fst ∘ f
    contr₁ f {x} {y} r = cong fst $ snd (f x) (y , r)

    isFun-comp {RBC = RBC} fab fbc a
      = (c , b , Rab , Rbc) , λ where
          (c′ , b′ , Rab′ , Rb′c′) → {!!}
          -- let z : (b , Rab) ≡ (b′ , Rab′)
          --     z = (snd (fab a) (b′ , Rab′))
          --     b≡b′ : b ≡ b′
          --     b≡b′ = cong fst z
          --     w : (c , Rbc) ≡ (c′ , {!!})
          --     w = (snd (fbc b) (c′ , {!!}))
          -- in ΣPathP ({!!} , λ i₁ → (b≡b′ i₁) , {!!} , {!!})
      where
        b   = funR fab a
        Rab = center fab a
        c   = funR fbc b
        Rbc = center fbc b

    isFunIsProp R = λ f g → funExt (λ a → isPropIsContr (f a) (g a))

    isFunInvIsProp R = isFunIsProp (∼-inv R)

    isBifun-inv b .fun    = funInv b
    isBifun-inv b .funInv = fun b

    isBifun⇒≃ (f , g) = isoToEquiv $ iso (funR f) (funR g)
      (λ b → cong fst (snd (f (funR g b)) (b , center g b)))
      (λ a → cong fst (snd (g (funR f a)) (a , center f a)))

    isBifun⇒isEquiv F = snd $ isBifun⇒≃ F


The original paper then defines a univalent logical relation at every type constructor of CIC.
We are working in Agda so we are morally using the type constructors of ITT (I think?),
but they are identical in practice.

We start with the relation at Set::

  record ≈-Set (A B : Set i) : Set (lsuc i) where
    field
      rel   : A ⋈ B
      bifun : isBifun (ur rel)
  open ≈-Set

  ⋈-Set : Set i ⋈ Set i
  ⋈-Set = UR ≈-Set

Transport
~~~~~~~~~

Assuming two types are related at Set, we can lift from one type to another::

  □_ : {A B : Set i} ⦃ R : ur ⋈-Set A B ⦄ → A → B
  □_ ⦃ R ⦄ = funR (R .bifun .fun)

This is not very interesting per se: from :code:`R : A ≈ B` we can
retrieve an equivalence from A to B.
The interesting part will be to construct this :code:`R : A ≈ B` automatically.

Dependent products
~~~~~~~~~~~~~~~~~~

Given `A, B : \textsf{Set}_i` such that `A ⋈ B` is defined,
we define the relation `A → \textsf{Set}_i ⋈ B → \textsf{Set}_i` as:

.. math::

   P ≈ Q⟨A → \textsf{Set}_i ⋈ B → \textsf{Set}_i⟩
   \quad\iff\quad
   ∀(x : A)(y : B),\ x ≈ y⟨A ⋈ B⟩ ⇒ {P\ x} ≈ {Q\ y}⟨\textsf{Set}_i ⋈ \textsf{Set}_i⟩

Given:

- `A, B : \textsf{Set}_i` such that `A ⋈ B` is defined;
- `P : A → \textsf{Set}_i`, `Q : B → \textsf{Set}_i`
  such that `P ≈ Q⟨A → \textsf{Set}_i ⋈ B → \textsf{Set}_i⟩`;

we define the relation `{Πx:A.P\ x ⋈ Πy:B.Q\ y}` as:

.. math::

   f ≈ g ⟨ Πx:A.P\ x ⋈ Πy:B.Q\ y ⟩ \quad\iff\quad
   ∀ (x : A)(y : B),\ x ≈ y⟨A ⋈ B⟩ ⇒ {f\ x} ≈ {g\ y}⟨P\ x ⋈ Q\ y ⟩

::

  ⋈-∀′ : {A B : Set i} → A ⋈ B → (A → Set i) ⋈ (B → Set i)
  ⋈-∀′ R = UR λ P Q → ∀ {x y} → ur R x y → ur ⋈-Set (P x) (Q y)

  ⋈-∀ : {A B : Set i} (R : A ⋈ B) →
        {P : A → Set i} {Q : B → Set i} → ur (⋈-∀′ R) P Q
        → (∀ x → P x) ⋈ (∀ y → Q y)
  ⋈-∀ RA RF = UR λ f g → ∀ {x y} (H : ur RA x y) → ur (RF H .rel) (f x) (g y)

We prove a few properties "just for fun"::

  ≈-refl  : (A : Set i) → ur ⋈-Set A A
  ≈-sym   : {A B : Set i} → ur ⋈-Set A B → ur ⋈-Set B A
  ≈-trans : {A B C : Set i} → ur ⋈-Set A B → ur ⋈-Set B C → ur ⋈-Set A C

..
  ::

    ≈-refl A = record
      { rel   = ⋈-refl A
      ; bifun = isBifun-≡ A
      }

    ≈-sym RAB = record
      { rel   = ⋈-sym (rel RAB)
      ; bifun = isBifun-inv (bifun RAB)
      }

    ≈-trans RAB RBC = record
      { rel   = ⋈-trans (rel RAB) (rel RBC)
      ; bifun = isBifun-comp (bifun RAB) (bifun RBC)
      }

Type Constructors are Univalently Parametric
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

We cannot prove the abstraction theorem inside the theory itself, that is,
that every term is univalently related to itself. What we can do, however,
is prove that every type constructor is univalently parametric.

.. math:: \textsf{Set}_i ≈ \textsf{Set}_i ⟨ \textsf{Set}_{i+1} ⋈ \textsf{Set}_{i+1} ⟩


Set
---

First we prove the fixpoint property of the univalent relation at Set::

  Set≈Set : ur ⋈-Set (Set i) (Set i)
  Set≈Set {i} = record
    { rel   = ⋈-Set
    ; bifun = record
      { fun = λ A →
          (A , ≈-refl A)
          , λ where (B , A≈B) →
                      let A≡B = ua (isBifun⇒≃ (bifun A≈B))
                      in ΣPathP (A≡B , λ i → {!!})
      ; funInv = {!!}
      }
    }

Dependent Function Type
-----------------------

.. math::

   \frac{{{A : \textsf{Set}_i \qquad B : \textsf{Set}_i \qquad A ≈ B ⟨ \textsf{Set}_i ⋈ \textsf{Set}_i⟩}
   \atop
   {P : A → \textsf{Set}_i \qquad Q : B → \textsf{Set}_i \qquad P ≈ Q ⟨A → \textsf{Set}_i ⋈ B → \textsf{Set}_i⟩}}
   }{
   ∀ x → P\ x ≈ ∀ y → Q\ y ⟨\textsf{Set}_i ⋈ \textsf{Set}_i⟩
   }

::

  ∀≈∀ : {A B : Set i} (RA : ur ⋈-Set A B) →
        {P : A → Set i} {Q : B → Set i} (RF : ur (⋈-∀′ (rel RA)) P Q)
        → ur ⋈-Set (∀ x → P x) (∀ y → Q y)
  ∀≈∀ RA RF = record
    { rel   = ⋈-∀ (rel RA) RF
    ; bifun = record
      { fun = λ f → (( λ b → funR (RF (center inv b) .bifun .fun) (f (funR inv b)))
                    , λ {x} {y} H → {!funR (RF H .bifun .fun) (f x)!})
                    , λ where (q , f≈q) → ΣPathP
                                (funExt (λ x → {!!}) , {!!})
      ; funInv = λ g → {!!}
      }
    } where inv = RA .bifun .funInv

Univalent relation for parametrized datatypes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

When we declare a parametrized datatype `F : Δ → \textsf{Set}_i`, we introduce a new type constructor
inside the theory. Therefore we need to extend the univalent logical relation at this type former.
That is, given `x, y : Δ` such that `x ≈ y ⟨ Δ ⋈ Δ⟩`, we need to define `{F\ x ⋈ F\ y}`.

Let's look at a concrete example first.

The case of lists
-----------------

Recall the definition of inductive lists:

.. code-block:: agda

   data List (A : Set i) : Set i where
     []  : List A
     _∷_ : A → List A → List A

Our goal is to define, for `A, B : \textsf{Set}_i` such that `A ⋈ B` is defined, the relation
`{\textsf{List}\ A} ⋈ {\textsf{List}\ B}`. It seems reasonable to say that two lists
`xs` and `ys` are related iff they are the same length and their elements are related one-to-one.

We can define this relation inductively:

- `[] ≈ []`
- For any `x : A`, `y : B`, `xs : \textsf{List}\ A`, `ys : \textsf{List}\ B`
  such that `x ≈ y` and `xs ≈ ys`, then `(x ∷ xs) ≈ (y ∷ ys)`

::

  data ≈-List {A B : Set i} (R : A ⋈ B) : List A ∼ List B where
    ≈-[] : ≈-List R [] []
    ≈-∷  : ∀ {x y xs ys} → ur R x y → ≈-List R xs ys → ≈-List R (x ∷ xs) (y ∷ ys)

  instance
    ⋈-List : {A B : Set i} ⦃ R : A ⋈ B ⦄ → List A ⋈ List B
    ⋈-List ⦃ R ⦄ = UR (≈-List R)

Now we have to prove that `\textsf{List}` is univalently parametric.

::

  List≈List : {A B : Set i} ⦃ R : ur ⋈-Set A B ⦄ → ur ⋈-Set (List A) (List B)
  List≈List {A = A} {B} ⦃ R ⦄ = record
    { rel   = ⋈-List ⦃ rel R ⦄
    ; bifun = (λ xs → (map (funR f) xs , xs≈fxs xs) , contr-R xs)
            , (λ ys → (map (funR g) ys , gys≈ys ys) , contr-G ys)
    }
    where
      f = R .bifun .fun
      g = R .bifun .funInv

      xs≈fxs : ∀ xs → ≈-List (rel R) xs (map (funR f) xs)
      xs≈fxs []       = ≈-[]
      xs≈fxs (x ∷ xs) = ≈-∷ (center f x) (xs≈fxs xs)

      contr-R : ∀ xs (y : Σ (List B) (≈-List (rel R) xs)) → (map (funR f) xs , xs≈fxs xs) ≡ y
      contr-R [] (.[] , ≈-[]) = refl
      contr-R (x ∷ xs) (y ∷ ys , ≈-∷ x≈y xs≈ys) = ΣPathP
        ( cong₂ _∷_ (contr₁ f x≈y) (cong fst (contr-R xs (ys , xs≈ys)))
        , λ i → ≈-∷ (contr₂ f x≈y i) (cong snd (contr-R xs (ys , xs≈ys)) i)
        )

      gys≈ys : ∀ ys → ≈-List (rel R) (map (funR g) ys) ys
      gys≈ys []       = ≈-[]
      gys≈ys (y ∷ ys) = ≈-∷ (center g y) (gys≈ys ys)

      contr-G : ∀ ys (p : Σ (List A) (λ xs → ≈-List (rel R) xs ys))
                → (map (funR g) ys , gys≈ys ys) ≡ p
      contr-G [] (.[] , ≈-[]) = refl
      contr-G (y ∷ ys) (x ∷ xs , ≈-∷ x≈y xs≈ys) = ΣPathP
        ( cong₂ _∷_ (contr₁ g x≈y) (cong fst (contr-G ys (xs , xs≈ys)))
        , λ i → ≈-∷ (contr₂ g x≈y i) (cong snd (contr-G ys (xs , xs≈ys)) i)
        )

A bit of reflection magic
~~~~~~~~~~~~~~~~~~~~~~~~~

Ok, now the harsh truth: Agda's instance search is very limited.
In particular, you cannot have two overlapping instances in scope.
In the original paper, proof search is doing the heavy lifting.

Let's try to make this with a tiny (tiny) bit of reflection.

::

  open import Agda.Builtin.Unit
  open import Agda.Builtin.Nat
  open import Agda.Builtin.Reflection
  open import Reflection.TypeChecking.Monad.Syntax

  build-ur : Type → Type → TC Term
  -- hardcoded shit
  build-ur (agda-sort (lit 0)) (agda-sort (lit 0)) = do
    returnTC (def (quote Set≈Set) (arg (arg-info hidden relevant) (quoteTerm lzero) ∷ []))

  build-ur (lam _ _) (lam _ _) = do
    {!!}

  build-ur (pi (arg (arg-info v₁ _) A) P) (pi (arg (arg-info v₂ _) B) Q) = do
    A≈B ← build-ur A B
    P≈Q ← build-ur (lam v₁ P) (lam v₂ Q)
    returnTC (def (quote ∀≈∀) ( arg (arg-info visible relevant) A≈B
                              ∷ arg (arg-info visible relevant) P≈Q
                              ∷ []))

  build-ur s t = typeError ( strErr "Unable to match types"
                           ∷ termErr s
                           ∷ strErr "and"
                           ∷ termErr t
                           ∷ [])

  macro
    ok : Name → Name → Term → TC ⊤
    ok left right hole = do
      A ← getType left
      B ← getType right
      A≈B ← build-ur A B
      unify hole (def (quote ur) ( arg (arg-info visible relevant)
                                       (def (quote rel) (arg (arg-info visible relevant) A≈B ∷ []))
                                 ∷ arg (arg-info visible relevant) (def left [])
                                 ∷ arg (arg-info visible relevant) (def right [])
                                 ∷ []))

  postulate f : Set → Set

..

Ah, if only one had datatype-generic programming
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Ok, that's nice. But if you were to take a look at the proofs of the previous properties,
you would soon realise that although they are tedious, they have a very predictable structure.
By that I mean that there is nothing specific about lists in these proofs, and that they ought
to be derived for any datatype, rather than handwritten.

Now let's see how to derive the univalent logical relation for
user-defined datatypes. When implementing this in Coq, I had to rely on
MetaCoq_, and more specifically TemplateCoq_.
In Agda however, we have reflection built-in! There are two key differences
that will make my life easier:

- universe levels are **first-class values**;
- we can leave **holes** in the terms that we are defining, as sketched below.
  This is especially useful because it allows us to be less explicit
  about universe levels and implicit function arguments.
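
For instance, a minimal sketch of the second point: the reflected syntax has an
:code:`unknown` constructor that can stand for any term we would rather not spell
out, and :code:`unify` lets the elaborator solve it. The helper below is only
illustrative and is not used elsewhere.

.. code-block:: agda

   -- an implicit argument left as a hole, to be solved during unification
   hole : Arg Term
   hole = arg (arg-info hidden relevant) unknown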

.. _MetaCoq: https://github.com/MetaCoq/metacoq
.. _TemplateCoq: https://github.com/MetaCoq/metacoq

@@ -0,0 +1,591 @@
---
title: Parametricity for described datatypes
draft: yes
---

.. highlight:: agda
.. default-role:: math

This entry is just me figuring stuff out by writing it down.
The goal is to **derive automatically the logical relation generated by
a datatype** in Agda, without metaprogramming. Or, to be correct,
without *unsafe* metaprogramming --- no reflection allowed.

*The title means nothing and this is WIP.*

Only one thing to keep in mind: we are using the dirtiest of tricks, :code:`--type-in-type`.
For the time being, this will do just fine.

.. raw:: html

   <details>
   <summary>Prelude</summary>

::

  {-# OPTIONS --type-in-type #-}

  open import Agda.Primitive
  open import Agda.Builtin.Unit
  open import Agda.Builtin.Sigma
  open import Agda.Builtin.Equality
  open import Agda.Builtin.Nat
  open import Agda.Builtin.List

  variable A B : Set

  infixr -1 _$_
  infixl 2 _∘_

  id : {A : Set} → A → A
  id x = x

  const : {A : Set} → A → {B : Set} → B → A
  const x _ = x

  _$_ : {A : Set} {P : A → Set} (f : ∀ x → P x) (x : A) → P x
  f $ x = f x

  _×_ : (A B : Set) → Set
  A × B = Σ A (const B)

  case_of_ : {A : Set} {P : A → Set} (x : A) → ((x : A) → P x) → P x
  case x of f = f x

  _∘_ : {A : Set} {P : A → Set} {Q : {x : A} → P x → Set}
        (g : {x : A} → (y : P x) → Q y) (f : (x : A) → P x)
        → (x : A) → Q (f x)
  _∘_ g f x = g (f x)

  data Fin : Nat → Set where
    zero : ∀ {n} → Fin (suc n)
    suc  : ∀ {n} → Fin n → Fin (suc n)

  data Vec (A : Set) : Nat → Set where
    []  : Vec A 0
    _∷_ : ∀ {n} → A → Vec A n → Vec A (suc n)

  lookup : {A : Set} {n : Nat}
           → Vec A n → Fin n → A
  lookup (x ∷ _)  zero    = x
  lookup (_ ∷ xs) (suc n) = lookup xs n

  map : (A → B) → {n : Nat} → Vec A n → Vec B n
  map f []       = []
  map f (x ∷ xs) = f x ∷ map f xs

  Rel : Set → Set → Set
  Rel A B = A → B → Set

.. raw:: html

   </details>

A universe of descriptions
~~~~~~~~~~~~~~~~~~~~~~~~~~

The first step is to equip ourselves with a universe of descriptions to encode
inductive indexed datatypes. See the work of Pierre-Evariste Dagand [#dagand]_
to learn about descriptions. The one chosen here is an extension of Yorick
Sijsling's [#yorick]_, with some additions:

.. [#dagand] In particular, his PhD thesis:
   `Reusability and Dependent Types
   <https://gallium.inria.fr/~pdagand/stuffs/thesis-2011-phd/thesis.pdf>`_
.. [#yorick] https://github.com/yoricksijsling/ornaments-thesis

- indices now depend on the parameters;
- recursive occurrences can have different parameters.
  **Actually I removed this for the time being, since it's not really important for what we are trying
  to achieve. Still, it can be done.**

Encoding telescopes 🔭
---------------------

So we begin by defining an encoding of telescopes to encode the parameters and
indices of datatypes. I just found out it is very similar to what is
introduced in [Akaposi2015]_. What we are apparently doing is adding telescopes
and telescope substitutions to our theory, whereas they leave them outside of types.

.. [Akaposi2015] | **Towards cubical type theory**
                 | Thorsten Altenkirch, Ambrus Kaposi
                 | https://akaposi.github.io/nominal.pdf

In summary, we define **the type of telescopes**, parametrized by some set `A`.
And given a telescope and an element of `A`, we define **the type of telescope substitutions**:

.. math::
   \gdef\set{\textsf{Set}}
   \gdef\tele#1{\textsf{Tele}\ #1}
   \gdef\rel#1#2{\textsf{Rel}\ #1\ #2}

   A : \set\ &⊢\ \tele A : \set \\
   A : \set,\ Ω : \tele A,\ x : A\ &⊢ ⟦ Ω ⟧\ x \ : \set

For our purposes, a telescope substitution is a huge Σ-type containing an element for
every type in the telescope. We can introduce empty telescopes or extend them on the right.

.. math::
   \frac{}{A : \set \ ⊢\ ε : \tele A} \quad
   \frac
     {A : \set,\ Ω : \tele A\ ⊢\ F : Σ\ A\ ⟦ Ω ⟧ → \set}
     {A : \set,\ Ω : \tele A\ ⊢\ Ω\ ▷\ F : \tele A}

::

  infixl 1 _▷_ _▷₀_

  data Tele (A : Set) : Set
  ⟦_⟧ : {A : Set} → Tele A → A → Set

  data Tele A where
    ε   : Tele A
    _▷_ : (T : Tele A) → (Σ A ⟦ T ⟧ → Set) → Tele A

  ⟦ ε ⟧ x = ⊤
  ⟦ T ▷ F ⟧ x = Σ (⟦ T ⟧ x) (F ∘ (x ,_))

  _▷₀_ : {A : Set} (T : Tele A) (B : Set) → Tele A
  T ▷₀ B = T ▷ const B

Because telescopes are parametrized by some :code:`Set`,
we can define a telescope that depends on the telescope substitution of another telescope.
**That's how we encode parameter-dependent indices**. Describing the parameters and indices
of a datatype boils down to exhibiting some :code:`P` and :code:`I` such that
:code:`P : Tele ⊤` and :code:`I : Tele (⟦ P ⟧ tt)`.
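
For example, here is the kind of encoding this allows for the parameters and
index of vectors (a sketch only; the names are illustrative, and the proper
example further down uses the *encoded* naturals rather than the builtin
:code:`Nat`):

.. code-block:: agda

   VecP : Tele ⊤
   VecP = ε ▷₀ Set            -- one parameter: the type of elements

   VecI : Tele (⟦ VecP ⟧ tt)
   VecI = ε ▷₀ Nat            -- one index: the length, not using the parameter here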

We can also **extend telescopes with telescopes**.
For our purposes, it only makes sense to extend non-parametrized telescopes:

.. math::
   Ω : \tele ⊤,\ Ω' : \tele (⟦ Ω ⟧\ tt)\
     &⊢ \textsf{extend}\ Ω' : \tele ⊤ \\
   Ω : \tele ⊤,\ Ω' : \tele (⟦ Ω ⟧\ tt)\
     &⊢ \textsf{pr}\ : ⟦ \textsf{extend}\ Ω' ⟧\ tt → Σ\ (⟦ Ω ⟧\ tt)\ ⟦ Ω' ⟧

::

  ExTele : Tele ⊤ → Set
  ExTele T = Tele (⟦ T ⟧ tt)

  Ctx : {T : Tele ⊤} → ExTele T → Set
  Ctx Γ = Σ _ ⟦ Γ ⟧

  extend : {T : Tele ⊤} → ExTele T → Tele ⊤
  pr : {T : Tele ⊤} {Γ : ExTele T}
       → ⟦ extend Γ ⟧ tt → Ctx Γ

  extend {T} ε = T
  extend (G ▷ F) = extend G ▷ F ∘ pr ∘ snd

  pr {Γ = ε} γ = γ , tt
  pr {Γ = Γ ▷ F} (γ′ , x) =
    let (t , γ) = pr γ′ in t , γ , x

Constructors
------------

Like Yorick we describe constructors first and datatypes second by giving a vector
of constructor descriptions. This has the benefit of following more closely the structure
of Agda datatypes::

  data ConDesc {P : Tele ⊤} (Γ I : ExTele P) : Set

Both the constructor's current context :code:`Γ` and its indices :code:`I` are extensions
of the datatype parameters. The rest is pretty standard:

- :code:`κ` **marks the end of a constructor**. We simply compute indices from the current context.
- :code:`ι` **marks the position of a recursive occurrence**. Here we provide indices
  computed from the context. I am saddened by the fact that this
  recursive occurrence is not added to the context of the rest of the constructor.
  This can probably be done but would require too much effort for what it's worth.
  Who does that anyway?
- :code:`σ` **marks the introduction of a variable**. Its type is computed from the local context,
  and the variable is added to the context for the rest of the constructor.

::

  data ConDesc {P} Γ I where
    κ : (f : (γ : Ctx Γ) → ⟦ I ⟧ (γ .fst)) → ConDesc Γ I
    ι : (f : (γ : Ctx Γ) → ⟦ I ⟧ (γ .fst)) → ConDesc Γ I → ConDesc Γ I
    σ : (S : Ctx Γ → Set) → ConDesc (Γ ▷ S) I → ConDesc Γ I

..
  ::

    ⟦_⟧ᶜ : {P : Tele ⊤} {Γ I : ExTele P}
           (C : ConDesc Γ I)
           → (Ctx I → Set)
           → (Σ (⟦ P ⟧ tt) (λ p → ⟦ Γ ⟧ p × ⟦ I ⟧ p) → Set)

    ⟦ κ f   ⟧ᶜ X (p , γ , i)   = f (p , γ) ≡ i
    ⟦ ι f C ⟧ᶜ X g@(p , γ , _) = X (p , f (p , γ)) × ⟦ C ⟧ᶜ X g
    ⟦ σ S C ⟧ᶜ X (p , γ , i)   = Σ (S (p , γ)) λ s → ⟦ C ⟧ᶜ X (p , (γ , s) , i)

Datatypes
---------

Moving on, we encode datatypes as a vector of constructor descriptions that
share the same parameter and index telescopes. Then we tie the knot::

  Desc : (P : Tele ⊤) (I : ExTele P) → Nat → Set
  Desc P I n = Vec (ConDesc ε I) n

  ⟦_⟧ᵈ : {P : Tele ⊤} {I : ExTele P} {n : Nat}
         (D : Desc P I n)
         → (Ctx I → Set)
         → (Ctx I → Set)

  ⟦_⟧ᵈ {n = n} D X (p , i) = Σ (Fin n) λ k → ⟦ lookup D k ⟧ᶜ X (p , tt , i)

  data μ {n} {P : Tele ⊤} {I : ExTele P}
         (D : Desc P I n) (pi : Ctx I) : Set where
    ⟨_⟩ : ⟦ D ⟧ᵈ (μ D) pi → μ D pi


We can also define a helper :code:`constr` to easily retrieve the `k`-th constructor from a description::

  module _ {P : Tele ⊤} {I : Tele (⟦ P ⟧ tt)} {n : Nat} (D : Desc P I n) where

    Constr′ : {Γ : Tele (⟦ P ⟧ tt)}
              → ConDesc Γ I
              → Ctx Γ
              → Set
    Constr′ (κ f  ) pg = μ D (fst pg , f pg)
    Constr′ (ι f C) pg = μ D (fst pg , f pg) → Constr′ C pg
    Constr′ (σ S C) (p , γ) = (s : S (p , γ)) → Constr′ C (p , γ , s)

    module _ {C′ : ConDesc ε I} (mk : {(p , i) : Ctx I} → ⟦ C′ ⟧ᶜ (μ D) (p , tt , i) → μ D (p , i)) where

      constr′ : {Γ : Tele (⟦ P ⟧ tt)}
                (C : ConDesc Γ I)
                ((p , γ) : Ctx Γ)
                → ({i : ⟦ I ⟧ p} → ⟦ C ⟧ᶜ (μ D) (p , γ , i) → ⟦ C′ ⟧ᶜ (μ D) (p , tt , i))
                → Constr′ C (p , γ)
      constr′ (κ f  ) pg tie = mk (tie refl)
      constr′ (ι f C) pg tie x = constr′ C pg (tie ∘ (x ,_))
      constr′ (σ S C) (p , γ) tie s = constr′ C (p , γ , s) (tie ∘ (s ,_))

    -- | type of the kth constructor
    Constr : (k : Fin n) (p : ⟦ P ⟧ tt) → Set
    Constr k p = Constr′ (lookup D k) (p , tt)

    -- | kth constructor
    constr : (k : Fin n) (p : ⟦ P ⟧ tt) → Constr k p
    constr k p = constr′ (λ x → ⟨ k , x ⟩) (lookup D k) (p , tt) id

Another useful operation is to retrieve a telescope for every constructor of a datatype.

::

  module _ {P : Tele ⊤} {I : ExTele P} (X : Ctx I → Set) where
    contotele′ : {Γ : ExTele P}
                 → ConDesc Γ I
                 → (T : ExTele P)
                 → (((p , γ) : Ctx T) → ⟦ Γ ⟧ p)
                 → ExTele P
    contotele′ (κ _)   T mk = T
    contotele′ (ι f C) T mk =
      contotele′ C (T ▷ λ (p , γ) → X (p , f (p , mk (p , γ))))
                 λ (p , γ , x) → mk (p , γ)
    contotele′ (σ S C) T mk =
      contotele′ C (T ▷ λ (p , γ) → S (p , (mk (p , γ))))
                 λ (p , γ , s) → (mk (p , γ)) , s


  contotele : {P : Tele ⊤} {I : ExTele P} {n : Nat}
              → Desc P I n
              → Fin n
              → ExTele P
  contotele D k = contotele′ (μ D) (lookup D k) ε (const tt)

Examples
--------

Some examples to reassure ourselves that it works as intended::

  module Examples where

    natD : Desc ε ε 2
    natD = κ (const tt)
         ∷ ι (const tt) (κ (const tt))
         ∷ []

    nat : Set
    nat = μ natD (tt , tt)

    ze : Constr natD zero tt
    ze = constr natD zero tt

    su : Constr natD (suc zero) tt
    su = constr natD (suc zero) tt

    vecD : Desc (ε ▷₀ Set) (ε ▷₀ nat) 2
    vecD = κ (const (tt , ze))
         ∷ σ (const nat)
             (σ (λ (p , _) → p .snd)
                (ι (λ (p , ((tt , n) , x)) → tt , n)
                   (κ (λ (_ , ((tt , n) , _)) → tt , (su n)))))
         ∷ []

    vec : (A : Set) → nat → Set
    vec A n = μ vecD ((tt , A) , tt , n)

    nil : {A : Set} → Constr vecD zero (tt , A)
    nil {A} = constr vecD zero (tt , A)

    cons : {A : Set} → Constr vecD (suc zero) (tt , A)
    cons {A} = constr vecD (suc zero) (tt , A)

    xs : vec nat (su (su ze))
    xs = cons _ (su ze) (cons _ (su (su ze)) nil)

So far so good. Let's move to the fun part, we're in for the big bucks.


From descriptions to descriptions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To keep our goal in sight, here is what should happen for lists::

  module Translation where

    module Goal where

      listD : Desc (ε ▷ const Set) ε 2
      listD = κ (const tt)
            ∷ σ (λ ((tt , A) , γ) → A)
                (ι (λ (p , γ) → tt)
                   (κ (const tt)))
            ∷ []

      list : Set → Set
      list A = μ listD ((tt , A) , tt)

      nil : {A : Set} → list A
      nil {A} = ⟨ zero , refl ⟩

      cons : {A : Set} → A → list A → list A
      cons x xs = ⟨ suc zero , x , xs , refl ⟩

      -- the following is the description we want to derive
      -- listᵣD : Desc (ε ▷₀ Set ▷₀ Set ▷ λ (tt , (tt , A) , B) → Rel A B)
      --               (ε ▷ (λ ((((_ , A) , B) , R) , tt) → list A)
      --                  ▷ (λ ((((_ , A) , B) , R) , tt , _) → list B)) 2
      -- listᵣD = κ (λ ((((tt , A) , B) , R) , tt) → (tt , nil) , nil)
      --        ∷ ( σ (λ ((((     A) , B) , R) , tt)      → A      )
      --          $ σ (λ ((((tt , A) , B) , R) , tt , _)  → B      )
      --          $ σ (λ ((_ , R) , (tt , x) , y)         → R x y  )
      --          $ σ (λ ((((tt , A) , B ) , R) , _)      → list A )
      --          $ σ (λ ((((tt , A) , B ) , R) , _)      → list B )
      --          $ ι (λ (γ , (_ , xs) , ys) → γ , (tt , xs) , ys  )
      --          $ κ (λ (_ , (((((_ , x) , y) , _) , xs) , ys)) → (tt , cons x xs) , cons y ys)
      --          )
      --        ∷ []

      -- listᵣ : {A B : Set} (R : Rel A B) → list A → list B → Set
      -- listᵣ {A} {B} R xs ys = μ listᵣD ((((tt , A) , B) , R) , (tt , xs) , ys)

      -- nilᵣ : {A B : Set} {R : Rel A B} → listᵣ R nil nil
      -- nilᵣ = ⟨ zero , refl ⟩

      -- consᵣ : {A B : Set} {R : Rel A B}
      --         → ∀ {x y } (x≈y : R x y)
      --         → ∀ {xs ys} (xs≈ys : listᵣ R xs ys)
      --         → listᵣ R (cons x xs) (cons y ys)
      -- consᵣ {x = x} {y} x≈y {xs} {ys} xs≈ys =
      --   ⟨ suc zero , x , y , x≈y , xs , ys , xs≈ys , refl ⟩

Hm. What we expect to generate looks like a mess. On a positive note, it does
seem like we do not need to add recursive occurrences to the context. It is also reassuring that
we can indeed encode the relation. To be honest, the relation on its own is not *that* useful.
What would be great is if we were able to derive the abstraction theorem for this datatype too::

      -- param : (R : Rel A A) (PA : ∀ x → R x x)
      --         → (xs : list A) → listᵣ R xs xs
      -- param R PA ⟨ zero , refl ⟩ = nilᵣ
      -- param R PA ⟨ suc zero , x , xs , refl ⟩ = consᵣ (PA x) (param R PA xs)

This also looks quite doable.

Moving forward while keeping our head down.


Relating telescope substitutions
--------------------------------

The first thing we need is a relation on telescope substitutions.
Naturally, because substitutions are encoded as Σ-types, two substitutions are related
iff their elements are related one to one, using the relational
interpretation of their respective types.

.. math::
   T : \tele A,\ x_1 : A,\ x_2 : A,\ x_r : ⟦A⟧_p
   \ ⊢ ⟦Π\ A\ ⟦ T ⟧ ⟧_p\ x_1\ x_2\ x_r : \rel {(⟦ T ⟧\ x_1)} {(⟦ T ⟧\ x_2)}

.. math::
   ⟦Π\ A\ ⟦ε⟧⟧_p\ x_1\ x_2\ x_r\ t_1\ t_2 &≡ ⊤ \\
   ⟦Π\ A\ ⟦T\ ▷\ F⟧⟧_p\ x_1\ x_2\ x_r\ (t_1 , s_1)\ (t_2 , s_2) &≡
     Σ\ (⟦Π\ A\ ⟦T⟧⟧_p\ x_1\ x_2\ x_r\ t_1\ t_2)\ λ\ t_r\ .
     \ ⟦ F ⟧_p\ t_1\ t_2\ t_r\ s_1\ s_2

The only problem here is that we need `⟦A⟧_p` and `⟦F⟧_p`. We cannot possibly compute them,
because `A` could be virtually *anything*, not just one of our friendly described datatypes.
An easy way out is to kindly ask for these relations. Therefore we introduce a new type
`\textsf{Help}\ T` associated with the telescope `T` we're trying to translate,
whose inhabitants must hold every required relation.

::

  Help : Tele A → A → A → Set

  -- I suspect we don't care about xᵣ so I'm omitting it
  ₜ⟦_⟧ₚ : (T : Tele A) {x y : A} → Help T x y → Rel (⟦ T ⟧ x) (⟦ T ⟧ y)

  Help ε _ _ = ⊤
  Help (T ▷ F) x₁ x₂ = Σ (Help T x₁ x₂)
                         λ H → ∀ t₁ t₂ (tᵣ : ₜ⟦ T ⟧ₚ H t₁ t₂)
                               → Rel (F (x₁ , t₁)) (F (x₂ , t₂))

  ₜ⟦ ε ⟧ₚ tt tt tt = ⊤
  ₜ⟦ T ▷ F ⟧ₚ (H , HF) (t₁ , s₁) (t₂ , s₂) =
    Σ (ₜ⟦ T ⟧ₚ H t₁ t₂) λ tᵣ → HF t₁ t₂ tᵣ s₁ s₂

  ExHelp : {P : Tele ⊤} → ExTele P → Set
  ExHelp I = ∀ p₁ p₂ → Help I p₁ p₂


Not my type
-----------

Our relation will be inductively defined, so we need to figure out beforehand its
parameters and indices. Parameters are easy: we expect two substitutions of the initial
parameters' telescope, and a proof that they are related.

::

  module _ {P : Tele ⊤} {I : ExTele P} {n : Nat}
           (D : Desc P I n)
           (HP : Help P tt tt) (HI : ExHelp I)
           where

    Pₚ : Tele ⊤
    Pₚ = ε
       ▷₀ ⟦ P ⟧ tt
       ▷₀ ⟦ P ⟧ tt
       ▷ λ (_ , (_ , p₁) , p₂) → ₜ⟦ P ⟧ₚ HP p₁ p₂

    p₁ : ⟦ Pₚ ⟧ tt → ⟦ P ⟧ tt
    p₁ (((_ , p) , _) , _) = p

    p₂ : ⟦ Pₚ ⟧ tt → ⟦ P ⟧ tt
    p₂ (((_ , _) , p) , _) = p

    Iₚ : ExTele Pₚ
    Iₚ = ε
       ▷ (λ ((((_ , p₁) , _) , _) , _) → ⟦ I ⟧ p₁)
       ▷ (λ ((((_ , _) , p₂) , _) , _) → ⟦ I ⟧ p₂)
       ▷ (λ ((((_ , p₁) , _) , _) , (_ , i₁) , _) → μ D (p₁ , i₁))
       ▷ (λ ((((_ , _) , p₂) , _) , (_ , i₂) , _) → μ D (p₂ , i₂))


Apart from the typical clumsiness of dealing with substitutions, we have our parameters and indices.

----

::

    Cₚ′ : {Γₚ : ExTele Pₚ}
          {Γ : ExTele P}
          → (C : ConDesc Γ I)
          → (c₁ : {p : ⟦ Pₚ ⟧ tt} → ⟦ Γₚ ⟧ p → ⟦ Γ ⟧ (p₁ p))
          → (c₂ : {p : ⟦ Pₚ ⟧ tt} → ⟦ Γₚ ⟧ p → ⟦ Γ ⟧ (p₂ p))
          → (f₁ : ⟦ C ⟧ᶜ (μ D) ({!!} , {!!}) → μ D ({!!} , {!!}))
          → (ConDesc Γₚ Iₚ → ConDesc ε Iₚ)
          → ConDesc ε Iₚ

    Cₚ′ (κ f) g₁ g₂ _ tie =
      tie (κ (λ (p , g) → (((tt , (f (p₁ p , g₁ g)))
                               ,  f (p₂ p , g₂ g))
                          , {!!}) , {!!}))

    Cₚ′ (ι f C) c₁ c₂ _ tie =
      Cₚ′ C
          (c₁ ∘ λ ((p , _) , _) → p)
          (c₂ ∘ λ ((p , _) , _) → p)
          {!!}
          (tie ∘ λ C → σ (λ (p , g) → μ D (p₁ p , f (p₁ p , c₁ g)))
                     $ σ (λ (p , g , _) → μ D (p₂ p , f (p₂ p , c₂ g)))
                     $ ι (λ (p , (_ , x) , y) → (((tt , _) , _) , x) , y)
                     $ C)

    Cₚ′ (σ S C) c₁ c₂ _ tie =
      Cₚ′ C
          (λ (((g , s₁) , _) , _) → c₁ g , s₁)
          (λ (((g , _) , s₂) , _) → c₂ g , s₂)
          {!!}
          (tie ∘ λ C → σ (λ (p , g) → S (p₁ p , c₁ g))
                     $ σ (λ (p , g , _) → S (p₂ p , c₂ g))
                     $ σ {!!} -- we need some relation here
                     $ C)

    Cₚ : ConDesc ε I → ConDesc ε Iₚ
    Cₚ C = Cₚ′ C id id {!!} id

    Dₚ : Desc Pₚ Iₚ n
    Dₚ = map Cₚ D

CPS all the way.

----

Assuming we had implemented what's missing above, we would have the relation we wanted::

    ⟦_⟧ₚ : {p₁ p₂ : ⟦ P ⟧ tt} (pᵣ : ₜ⟦ P ⟧ₚ HP p₁ p₂)
           {i₁ : ⟦ I ⟧ p₁} {i₂ : ⟦ I ⟧ p₂}
           → Rel (μ D (p₁ , i₁)) (μ D (p₂ , i₂))
    ⟦_⟧ₚ pᵣ t₁ t₂ = μ Dₚ ((_ , pᵣ) , (_ , t₁) , t₂)

The abstraction theorem becomes::

    -- param : ∀ {p i} (t : μ D (p , i)) → ⟦_⟧ₚ {!!} {!!} t t
    -- param = {!!}


----

An example on lists:
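
As a sketch of the first ingredient, the helper one would supply for the
parameter telescope of :code:`list` (a single :code:`Set` parameter) can simply pick
:code:`Rel` itself as the relation between the two parameter types; the derived
description would then be close to the commented :code:`listᵣD` shown earlier.
The name below is illustrative only.

.. code-block:: agda

   listParamHelp : Help (ε ▷ const Set) tt tt
   listParamHelp = tt , λ _ _ _ → Rel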

----

Conclusion
~~~~~~~~~~

Now, why should we care? Well, I don't really know just yet.
I do think there is value in having such transformations implemented in a type-safe way.
Still, because these representations are not intrinsic to how datatypes are defined in the theory,
it makes for a clumsy experience. Add the lack of cumulativity and everything becomes quite tedious.

Things I would like to explore next:

- Derive the proofs that any datatype and its constructors are univalently parametric.
  As of now we can derive the relation; what remains is to derive an equivalence (easy)
  and a proof that the two are equivalent.

- Get rid of the :code:`--type-in-type` flag, using Effectfully's technique, or embracing :code:`--cumulativity`?

- Investigate encodings of other constructions: co-inductive types? Records?

- Look into Formality's encoding of datatypes and what it means for generic programming.

----

.. [Bernardy2010] | **Parametricity and Dependent Types**
                  | Jean-Philippe Bernardy, Patrik Jansson, Ross Paterson
                  | http://www.staff.city.ac.uk/~ross/papers/pts.pdf

.. [Tabareau2019] | **The Marriage of Univalence and Parametricity**
                  | Nicolas Tabareau, Eric Tanter, Matthieu Sozeau
                  | https://arxiv.org/pdf/1909.05027.pdf

@@ -0,0 +1 @@
An incomplete list of projects I have worked on.
@ -0,0 +1,540 @@
|
||||||
|
---
|
||||||
|
title: achille
|
||||||
|
subtitle: A Haskell library for building static site generators
|
||||||
|
year: "2020"
|
||||||
|
labels:
|
||||||
|
repo: flupe/achille
|
||||||
|
license: MIT
|
||||||
|
---
|
||||||
|
|
||||||
|
**achille** [aʃil] is a tiny Haskell library for building your very own **static site
|
||||||
|
generator**. It is in spirit a direct successor to [Hakyll][Hakyll].
|
||||||
|
|
||||||
|
## Motivation
|
||||||
|
|
||||||
|
Static site generators (SSG) have proven to be very useful tools for easily
|
||||||
|
generating static websites from neatly organised content files. Most of them
|
||||||
|
support using **markup languages** like markdown for writing content, and offer
|
||||||
|
**incremental compilation** so that updating a website stays **fast**,
|
||||||
|
regardless of its size. However, most SSGs are very opinionated about how you
|
||||||
|
should manage your content. As soon as your specific needs deviate slightly
|
||||||
|
from what your SSG supports, it becomes a lot more tedious.
|
||||||
|
|
||||||
|
This leads to many people writing their own personal static site generators
|
||||||
|
from scratch. This results in a completely personalised workflow, but without
|
||||||
|
good libraries it is a time-consuming endeavor, and incremental compilation is often
|
||||||
|
out of the equation as it is hard to get right.
|
||||||
|
|
||||||
|
This is where **achille** and [Hakyll][Hakyll] come in: they provide a *domain
|
||||||
|
specific language* embedded in Haskell to easily yet very precisely describe
|
||||||
|
how to build your site. Compile this description and **you get a full-fledged
|
||||||
|
static site generator with incremental compilation**, tailored specifically to
|
||||||
|
your needs.
|
||||||
|
|
||||||
|
[Hakyll]: https://jaspervdj.be/hakyll
|
||||||
|
|
||||||
|
### Why Hakyll is not enough
|
||||||
|
|
||||||
|
To provide incremental compilation, Hakyll relies on a global store, in which
|
||||||
|
all your *intermediate values* are stored. It is *your* responsibility to
|
||||||
|
populate it with *snapshots*. There are some severe limitations to this
|
||||||
|
approach:
|
||||||
|
|
||||||
|
- The store is **fundamentally untyped**, so **retrieving snapshots may fail at
|
||||||
|
runtime** if you're not careful when writing your build rules. You may
|
||||||
|
argue that's not very critical --- I think it shouldn't be possible in the
|
||||||
|
first place. We are using a strongly typed language, so we shouldn't have
|
||||||
|
to rely on flaky coercions at runtime to manipulate intermediate values.
|
||||||
|
|
||||||
|
- **Loading snapshots with glob patterns is awkward**. With Hakyll, *the*
|
||||||
|
way to retrieve intermediate values is by querying the store,
|
||||||
|
using glob patterns. This indirect way of managing values is very
|
||||||
|
clumsy. In Haskell, the very purpose of variables is to store intermediate
|
||||||
|
values, so we should only have to deal with plain old variables.
|
||||||
|
|
||||||
|
- **Dependencies are not explicit**. Because it relies on a global store for
|
||||||
|
handling intermediate values, Hakyll has to make sure that the snapshots you
|
||||||
|
want to load have been generated already. And because rules have no imposed
|
||||||
|
order despite implicit inter-dependencies, Hakyll has to evaluate each rule
very carefully, possibly pausing it to compute missing dependencies.
|
||||||
|
This is very complex and quite frankly impressive, yet I believe we can strive
|
||||||
|
for a simpler model of evaluation. If we used plain old variables to hold
|
||||||
|
intermediate values, we simply would not be allowed to refer to an undefined
|
||||||
|
variable.
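For reference, here is roughly what the snapshot workflow described above looks like
in Hakyll (a simplified sketch; `saveSnapshot` and `loadAllSnapshots` are Hakyll
functions, the surrounding rules are abbreviated):

```haskell
-- Compile a post and store an intermediate snapshot of its body
-- in the global store.
match "posts/*" $ do
  route   $ setExtension "html"
  compile $ pandocCompiler >>= saveSnapshot "content"

-- Elsewhere, fetch the intermediate values back by glob pattern and
-- snapshot name; the lookup and its type are only checked at runtime.
posts <- loadAllSnapshots "posts/*" "content"
```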
|
||||||
|
|
||||||
|
There are other somewhat debatable design decisions:
|
||||||
|
|
||||||
|
- In Hakyll, every rule produces an output file, and only one, at least if you
restrict yourself to the API it provides. I argue
|
||||||
|
such a library should not care whether a rule produces any output on the
|
||||||
|
filesystem. Its role is merely to know *if the rule must be executed*. Because of
|
||||||
|
this requirement, producing multiple outputs from the same file is a tad
|
||||||
|
cumbersome.
|
||||||
|
- Because Hakyll stores many content files directly in the store, the resulting
|
||||||
|
cache is *huge*. This is unnecessary: the files are right there in the content
directory.
|
||||||
|
- Hakyll uses a *lot* of abstractions --- `Compiler`, `Item`, `Rule`, `RuleSet`
|
||||||
|
--- whose purpose is not obvious to a newcomer.
|
||||||
|
- It defines monads to allow the convenient `do` notation to be used, but
completely disregards the very benefit of using monads --- they compose!
|
||||||
|
|
||||||
|
### Other tools
|
||||||
|
|
||||||
|
As always when I think I am onto something, I jumped straight into code
and forgot to check whether there were alternatives. By fixating on Hakyll, I did not
realize that many people had made the same observations about Hakyll's shortcomings
and improved upon it. It was only after building most of **achille**
in a week that I realized there were many
|
||||||
|
other similar tools available, namely: [rib][rib], [slick][slick], [Pencil][pencil] &
|
||||||
|
[Lykah][lykah].
|
||||||
|
|
||||||
|
[rib]: https://rib.srid.ca/
|
||||||
|
[slick]: https://hackage.haskell.org/package/slick
|
||||||
|
[pencil]: http://elbenshira.com/pencil/
|
||||||
|
[lykah]: https://hackage.haskell.org/package/Lykah
|
||||||
|
|
||||||
|
Fortunately, I still believe **achille** is a significant improvement over these libraries.
|
||||||
|
|
||||||
|
- As far as I can tell, **pencil** does not provide incremental generation.
|
||||||
|
It also relies on a global store, no longer untyped but very
|
||||||
|
restrictive about what you can store. It implements its own templating language.
|
||||||
|
- Likewise, no incremental generation in **Lykah**.
|
||||||
|
It reimplements its own HTML DSL rather than using *lucid*,
and is very opinionated, undocumented and unmaintained.
|
||||||
|
- **rib** and **slick** are the most feature-complete of the lot.
|
||||||
|
They both provide a minimalist web-focused interface over the very powerful build system
|
||||||
|
[Shake][Shake].
|
||||||
|
|
||||||
|
[Shake]: https://shakebuild.com/
|
||||||
|
|
||||||
|
## How achille works
|
||||||
|
|
||||||
|
In **achille** there is a single abstraction for reasoning about build rules:
|
||||||
|
`Recipe m a b`. A **recipe** of type `Recipe m a b` will produce a value of type
|
||||||
|
`m b` given some input of type `a`.
|
||||||
|
Conveniently, if `m` is a monad then **`Recipe m a` is a monad** too, so
|
||||||
|
you can retrieve the output of a recipe to reuse it in another recipe.
|
||||||
|
|
||||||
|
*(Because of caching, a recipe is **not** just a Kleisli arrow)*
|
||||||
|
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
-- the (>>=) operator, restricted to recipes
|
||||||
|
(>>=) :: Monad m => Recipe m a b -> (b -> Recipe m a c) -> Recipe m a c
|
||||||
|
```
|
||||||
|
|
||||||
|
With only this, **achille** tackles every single one of the limitations highlighted above.
|
||||||
|
|
||||||
|
- Intermediate values are plain old Haskell variables.
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
renderPost :: Recipe IO FilePath Post
buildPostIndex :: [Post] -> Recipe IO a ()

renderPosts :: Task IO ()
renderPosts = do
  posts <- match "posts/*" renderPost
  buildPostIndex posts
|
||||||
|
```
|
||||||
|
|
||||||
|
See how a correct ordering of build rules is enforced by design: you can only
|
||||||
|
use an intermediate value once the recipe it originates from has been
|
||||||
|
executed.
|
||||||
|
|
||||||
|
Note: a **task** is a recipe that takes no input.
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
type Task m = Recipe m ()
|
||||||
|
```
|
||||||
|
|
||||||
|
- **achille** does not care what happens during the execution of a recipe.
|
||||||
|
It only cares about the input and return type of the recipe --- that is, the
|
||||||
|
type of intermediate values.
|
||||||
|
In particular, **achille** does not expect every recipe to produce a file,
and lets you decide when to actually write to the filesystem.
|
||||||
|
|
||||||
|
For example, it is very easy to produce multiple versions of the same source file:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
renderPage :: Recipe IO FilePath FilePath
|
||||||
|
renderPage = do
  -- Copy the input file as is to the output directory
  copyFile

  -- Render the input file with pandoc,
  -- then save it to the output dir with extension ".html"
  compilePandoc >>= saveTo (-<.> "html")
|
||||||
|
```
|
||||||
|
|
||||||
|
Once you have defined the recipe for building your site, you forward
|
||||||
|
this description to **achille** in order to get a command-line interface for
|
||||||
|
your generator, just as you would using Hakyll:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
buildSite :: Task IO ()
|
||||||
|
|
||||||
|
main :: IO ()
|
||||||
|
main = achille buildSite
|
||||||
|
```
|
||||||
|
|
||||||
|
Assuming we compiled the file above into an executable called `site`, running
|
||||||
|
it gives the following output:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ site
|
||||||
|
A static site generator for fun and profit
|
||||||
|
|
||||||
|
Usage: site COMMAND
|
||||||
|
|
||||||
|
Available options:
|
||||||
|
-h,--help Show this help text
|
||||||
|
|
||||||
|
Available commands:
|
||||||
|
build Build the site once
|
||||||
|
deploy Server go brrr
|
||||||
|
clean Delete all artefacts
|
||||||
|
```
|
||||||
|
|
||||||
|
That's it, you now have your very own static site generator!
|
||||||
|
|
||||||
|
### Caching
|
||||||
|
|
||||||
|
So far we haven't talked about caching and incremental builds.
|
||||||
|
Rest assured: **achille produces generators with robust incremental
|
||||||
|
builds** for free. To understand how this is done, we can simply look at the
|
||||||
|
definition of `Recipe m a b`:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
-- the cache is simply a lazy bytestring
|
||||||
|
type Cache = ByteString
|
||||||
|
|
||||||
|
newtype Recipe m a b = Recipe (Context a -> m (b, Cache))
|
||||||
|
```
|
||||||
|
|
||||||
|
In other words, when a recipe is run, it is provided a **context** containing
|
||||||
|
the input value, **a current cache** *local* to the recipe, and some more
|
||||||
|
information. The IO action is executed, and we update the local cache with the
|
||||||
|
new cache returned by the recipe. We say *local* because of how composition of
|
||||||
|
recipes is handled internally. When the *composition* of two recipes (made with
|
||||||
|
`>>=` or `>>`) is being run, we retrieve two bytestrings from the local cache
|
||||||
|
and feed them as local cache to both recipes respectively. Then we gather the two updated
|
||||||
|
caches, join them, and make the result the new cache of the composition.
|
||||||
|
|
||||||
|
This way, a recipe is guaranteed to receive the same local cache it returned
|
||||||
|
during the last run, *untouched by other recipes*. And every recipe is free to
manage this local cache however it sees fit.
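To make this more concrete, here is a minimal sketch of how such a composition *could*
work. This is not achille's actual code: the `localCache` field and the
`splitCache` / `joinCache` helpers are hypothetical stand-ins for whatever encoding is
used to pack two child caches into one.

```haskell
-- Hypothetical sketch of (>>=) on recipes; not achille's implementation.
bindRecipe :: Monad m => Recipe m a b -> (b -> Recipe m a c) -> Recipe m a c
bindRecipe (Recipe f) g = Recipe $ \ctx -> do
  let (c1, c2) = splitCache (localCache ctx)  -- caches from the previous run
  (x, c1') <- f ctx { localCache = c1 }       -- run the first recipe
  let Recipe h = g x
  (y, c2') <- h ctx { localCache = c2 }       -- run the second recipe
  pure (y, joinCache c1' c2')                 -- pack both updated caches
```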
|
||||||
|
|
||||||
|
As a friend noted, **achille** is "just a library for composing memoized
|
||||||
|
computations".
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
#### High-level interface
|
||||||
|
|
||||||
|
Because we do not want the user to carry the burden of updating the cache
|
||||||
|
manually, **achille** comes with many utilities for common operations, managing
|
||||||
|
the cache for us under the hood. Here is an example highlighting how we keep
|
||||||
|
fine-grained control over the cache at all times, while never having to
|
||||||
|
manipulate it directly.
|
||||||
|
|
||||||
|
Say you want to run a recipe for every file matching a glob pattern, *but do
|
||||||
|
not care about the output of the recipe*. A typical example would be to copy
|
||||||
|
every static asset of your site to the output directory. **achille** provides
|
||||||
|
the `match_` function for this very purpose:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
match_ :: Glob.Pattern -> Recipe FilePath b -> Recipe a ()
|
||||||
|
```
|
||||||
|
|
||||||
|
We would use it in this way:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
copyAssets :: Recipe a ()
|
||||||
|
copyAssets = match_ "assets/*" copyFile
|
||||||
|
|
||||||
|
main :: IO ()
|
||||||
|
main = achille copyAssets
|
||||||
|
```
|
||||||
|
|
||||||
|
Under the hood, `match_ p r` will cache every filepath for which the recipe was
|
||||||
|
run. During the next run, for every filepath matching the pattern, `match_ p r` will
|
||||||
|
look up the path in its cache. If it is found and the file hasn't been modified since,
|
||||||
|
then we do nothing for this path. Otherwise, the recipe is run and the filepath
|
||||||
|
added to the cache.
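The decision itself boils down to a simple modification-time check. Here is a
self-contained illustration of the idea, with a hypothetical helper (not achille's
internals), assuming the cache holds a map from paths to the time they were last built:

```haskell
import           Data.Time       (UTCTime)
import qualified Data.Map.Strict as Map

-- A path is rebuilt when it is unknown, or when its current modification
-- time is newer than the one recorded during the last run.
shouldRebuild :: Map.Map FilePath UTCTime -> FilePath -> UTCTime -> Bool
shouldRebuild lastBuilt path mtime =
  case Map.lookup path lastBuilt of
    Nothing      -> True             -- new file: run the recipe
    Just lastRun -> mtime > lastRun  -- modified since: run it again
```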
|
||||||
|
|
||||||
|
Now assume we do care about the output of the recipe we want to run on every filepath.
|
||||||
|
For example if we compile every blogpost, we want to retrieve each blogpost's title and
|
||||||
|
the filepath of the compiled `.html` file. In that case, we can use the
|
||||||
|
built-in `match` function:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
match :: Binary b
      => Glob.Pattern -> Recipe FilePath b -> Recipe a [b]
|
||||||
|
```
|
||||||
|
|
||||||
|
Notice the difference here: we expect the type of the recipe output `b` to have
|
||||||
|
an instance of `Binary`, **so that we can encode it in the cache**. Fortunately,
|
||||||
|
many of the usual Haskell types have an instance available. Then we can do:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
data PostMeta = PostMeta { title :: Text }
|
||||||
|
renderPost :: Text -> Text -> Text
|
||||||
|
renderIndex :: [(Text, FilePath)] -> Text
|
||||||
|
|
||||||
|
buildPost :: Recipe FilePath (Text, FilePath)
buildPost = do
  (PostMeta title, pandoc) <- compilePandocMeta
  renderPost title pandoc & saveAs (-<.> "html")
    <&> (title,)

buildPosts :: Recipe a [(Text, FilePath)]
buildPosts = match "posts/*.md" buildPost
|
||||||
|
|
||||||
|
buildIndex :: [(Text, FilePath)] -> Recipe a FilePath
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Shortcomings
|
||||||
|
|
||||||
|
The assertion *"A recipe will always receive the same cache between two runs"*
|
||||||
|
can only be violated in the following two situations:
|
||||||
|
|
||||||
|
- There is **conditional branching in your recipes**, and more specifically,
|
||||||
|
**branching for which the branch taken can differ between runs**.
|
||||||
|
|
||||||
|
For example, it is **not** problematic to do branching on the extension of a file,
|
||||||
|
as the same branch will be taken on every execution.
|
||||||
|
|
||||||
|
But say you parametrize a recipe by some boolean value, for whatever reason,
and that value may change between runs. Because the two branches will
share the same cache, every time the boolean changes the recipe will start
from an inconsistent cache, recompute everything from scratch, and overwrite
the existing cache.
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
buildSection :: Bool -> Task IO ()
|
||||||
|
buildSection isProductionBuild =
  if isProductionBuild then
    someRecipe
  else
    someOtherRecipe
|
||||||
|
```
|
||||||
|
|
||||||
|
Although I expect few people to ever need this kind of conditional branching for
|
||||||
|
generating a static site, **achille** still comes with combinators for branching.
|
||||||
|
You can use `if` in order to keep two separate caches for the two branches:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
if :: Bool -> Recipe m a b -> Recipe m a b -> Recipe m a b
|
||||||
|
```
|
||||||
|
|
||||||
|
The previous example becomes:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
buildSection :: Bool -> Task IO ()
|
||||||
|
buildSection isProductionBuild =
  Achille.if isProductionBuild
    someRecipe
    someOtherRecipe
|
||||||
|
```
|
||||||
|
|
||||||
|
### No runtime failures
|
||||||
|
|
||||||
|
All the built-in cached recipes **achille** provides are implemented carefully
|
||||||
|
so that **they never fail in case of cache corruption**. That is, in the
|
||||||
|
eventuality of failing to retrieve the desired values from the cache, our
|
||||||
|
recipes will automatically recompute the result from the input, ignoring the
|
||||||
|
cache entirely. To make sure this is indeed what happens, every cached recipe
|
||||||
|
in **achille** has been tested carefully (not yet really, but it is on the todo
|
||||||
|
list).
|
||||||
|
|
||||||
|
This means the only possible failures are those related to poorly formatted
content on the user's part: missing frontmatter fields, watching files
that do not exist, etc. All of these errors are gracefully reported to the
user.
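As for the fallback on cache corruption, the pattern behind it is simple enough to
sketch. Using the `Cache` synonym from above and a hypothetical helper name (this is
not achille's API), a cached recipe can always recover by recomputing:

```haskell
import Data.Binary (Binary, encode, decodeOrFail)

-- Try to decode a value from the cache; if decoding fails (corrupted or
-- empty cache), recompute the value and return a fresh encoding instead.
fromCacheOr :: Binary b => IO b -> Cache -> IO (b, Cache)
fromCacheOr recompute cache =
  case decodeOrFail cache of
    Right (_, _, value) -> pure (value, cache)
    Left  _             -> do
      value <- recompute
      pure (value, encode value)
```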
|
||||||
|
|
||||||
|
### Parallelism
|
||||||
|
|
||||||
|
**achille** could very easily support parallelism for free; I just haven't taken
the time to make it a reality.
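For the record, here is what such a combinator *might* look like; it is not part of
achille, and it reuses the hypothetical `localCache` / `splitCache` / `joinCache`
helpers from the caching sketch above:

```haskell
import Control.Concurrent.Async (concurrently)

-- Two recipes with no data dependency have independent local caches,
-- so they can simply be run concurrently.
inParallel :: Recipe IO a b -> Recipe IO a c -> Recipe IO a (b, c)
inParallel (Recipe f) (Recipe g) = Recipe $ \ctx -> do
  let (c1, c2) = splitCache (localCache ctx)
  ((x, c1'), (y, c2')) <- concurrently (f ctx { localCache = c1 })
                                       (g ctx { localCache = c2 })
  pure ((x, y), joinCache c1' c2')
```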
|
||||||
|
|
||||||
|
|
||||||
|
## Making a blog from scratch
|
||||||
|
|
||||||
|
Let's see how to use **achille** for making a static site generator for a blog.
|
||||||
|
First we decide what will be the structure of our source directory.
|
||||||
|
We choose the following:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
content
└── posts
    ├── 2020-04-13-hello-world.md
    ├── 2020-04-14-another-article.md
    └── 2020-05-21-some-more.md
|
||||||
|
```
|
||||||
|
|
||||||
|
We define the kind of metadata we want to allow in the frontmatter header
|
||||||
|
of our markdown files:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
{-# LANGUAGE DeriveGeneric #-}
|
||||||
|
|
||||||
|
import GHC.Generics
|
||||||
|
import Data.Aeson
|
||||||
|
import Data.Text (Text)
|
||||||
|
|
||||||
|
data Meta = Meta
  { title :: Text
  } deriving (Generic)
|
||||||
|
|
||||||
|
instance FromJSON Meta
|
||||||
|
```
|
||||||
|
|
||||||
|
This way we enforce correct metadata when retrieving the content of our files.
|
||||||
|
Every markdown file will have to begin with the following header for our
|
||||||
|
generator to proceed:
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
---
|
||||||
|
title: Something about efficiency
|
||||||
|
---
|
||||||
|
```
|
||||||
|
|
||||||
|
Then we create a generic template for displaying a page, thanks to lucid:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE BlockArguments #-}
|
||||||
|
|
||||||
|
import Lucid

renderPost :: Text -> Text -> Html a
renderPost title content = wrapContent do
  h1_ $ toHtml title
  toHtmlRaw content

renderIndex :: [(Text, FilePath)] -> Html a
renderIndex = wrapContent .
  ul_ . mconcat . map \(title, path) ->
    li_ $ a_ [href_ path] $ toHtml title

wrapContent :: Html a -> Html a
wrapContent content = doctypehtml_ do
  head_ do
    meta_ [charset_ "utf-8"]
    title_ "my very first blog"

  body_ do
    header_ $ h1_ "BLOG"
    content
|
||||||
|
```
|
||||||
|
|
||||||
|
We define a recipe for rendering every post:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
buildPosts :: Task IO [(Text, FilePath)]
buildPosts =
  match "posts/*.md" do
    (Meta title, text) <- compilePandocMetadata
    saveFileAs (-<.> "html") (renderPost title text)
      <&> (title,)
|
||||||
|
```
|
||||||
|
|
||||||
|
We can define a simple recipe for rendering the index, given a list of posts:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
buildIndex :: [(Text, FilePath)] -> Task IO FilePath
|
||||||
|
buildIndex posts =
  save (renderIndex posts) "index.html"
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, it's only a matter of composing the recipes and giving them to **achille**:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
main :: IO ()
|
||||||
|
main = achille do
  posts <- buildPosts
  buildIndex posts
|
||||||
|
```
|
||||||
|
|
||||||
|
And that's it, you now have a very minimalist incremental blog generator!
|
||||||
|
|
||||||
|
## Recursive recipes
|
||||||
|
|
||||||
|
It is very easy to define recursive recipes in **achille**. This allows us to
|
||||||
|
traverse and build tree-like structures, such as wikis.
|
||||||
|
|
||||||
|
For example, given the following structure:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
content
├── index.md
├── folder1
│   └── index.md
└── folder2
    ├── index.md
    ├── folder21
    │   └── index.md
    ├── folder22
    │   └── index.md
    └── folder23
        ├── index.md
        ├── folder231
        │   └── index.md
        ├── folder222
        │   └── index.md
        └── folder233
            └── index.md
|
||||||
|
```
|
||||||
|
|
||||||
|
We can generate a site with the same structure and in which each index page has
|
||||||
|
links to its children:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
renderIndex :: PageMeta -> [(PageMeta, FilePath)] -> Text -> Html
|
||||||
|
|
||||||
|
buildIndex :: Recipe IO a (PageMeta, FilePath)
|
||||||
|
buildIndex = do
  children <- walkDir

  matchFile "index.*" do
    (meta, text) <- compilePandoc
    renderIndex meta children text >>= save (-<.> "html")
    return $ (meta,) <$> getInput
|
||||||
|
|
||||||
|
walkDir :: Recipe IO a [(PageMeta, FilePath)]
|
||||||
|
walkDir = matchDir "*/" buildIndex
|
||||||
|
|
||||||
|
main :: IO ()
|
||||||
|
main = achille buildIndex
|
||||||
|
```
|
||||||
|
|
||||||
|
## Forcing the regeneration of output
|
||||||
|
|
||||||
|
Currently, **achille** doesn't track what files a recipe produces in the output
|
||||||
|
dir. This means you cannot ask for things like *"Please rebuild
|
||||||
|
output/index.html"*.
|
||||||
|
|
||||||
|
That's because we make the assumption that the output dir is untouched between
|
||||||
|
builds. The only reason I can think of for wanting to rebuild a specific page
|
||||||
|
is if the template used to generate it has changed.
|
||||||
|
But in that case, the template is *just another input*.
|
||||||
|
So you can treat it as such by putting it in your content directory and doing
|
||||||
|
the following:
|
||||||
|
|
||||||
|
```haskell
|
||||||
|
import Templates.Index (renderIndex)
|
||||||
|
|
||||||
|
buildIndex :: Task IO ()
|
||||||
|
buildIndex =
  watchFile "Templates/Index.hs" $ match_ "index.*" do
    compilePandoc <&> renderIndex >>= write "index.html"
|
||||||
|
```
|
||||||
|
|
||||||
|
This way, **achille** will automatically rebuild your index if the template has
|
||||||
|
changed!
|
||||||
|
|
||||||
|
While writing these lines, I realized it would be very easy for **achille**
|
||||||
|
to know which recipe produced which output file,
|
||||||
|
so I might just add that. Even then, it would still require you to ask for an output
|
||||||
|
file to be rebuilt if a template has changed. With the above pattern, it is
|
||||||
|
handled automatically!
|
|
@ -0,0 +1,4 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" version="1.1" height="20px" width="20px">
|
||||||
|
<path d="M 18,2 A 5,5 0 0 0 13,7 L 13, 13 A 5, 5 0 0 0 18,17 M 13,7 A 5,5 0 0 0 8,2 L 7,2 A 5,5 0 0 0 2,7 L 2,12 A 5, 5 0 0 0 7,17 L 10,17" style="stroke-width: 1; stroke-linecap: round; stroke-linejoin: bevel; stroke: #4c566a" fill="none"></path>
|
||||||
|
</svg>
|
|
@ -0,0 +1,14 @@
|
||||||
|
---
|
||||||
|
title: jibniz
|
||||||
|
subtitle: A javascript implementation of the IBNIZ VM
|
||||||
|
year: "2017"
|
||||||
|
labels:
  repo: flupe/jibniz
  license: MIT
|
||||||
|
---
|
||||||
|
|
||||||
|
**jibniz** is a javascript implementation of the IBNIZ virtual machine.
|
||||||
|
IBNIZ is an esoteric stack-based programming language created by viznut, in which almost every
|
||||||
|
instruction is one character long. The code is intended to be run for each
|
||||||
|
pixel on a 256x256 screen, at every frame, thus making it possible to produce
|
||||||
|
animations.
|
|
@ -0,0 +1,11 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg viewBox="-10 -10 20 20" xmlns="http://www.w3.org/2000/svg" version="1.1" height="20px" width="20px">
|
||||||
|
<!-- <path d="M 18,2 A 5,5 0 0 0 13,7 L 13, 13 A 5, 5 0 0 0 18,17 M 13,7 A 5,5 0 0 0 8,2 L 7,2 A 5,5 0 0 0 2,7 L 2,12 A 5, 5 0 0 0 7,17 L 10,17" style="stroke-width: 1; stroke-linecap: round; stroke-linejoin: bevel; stroke: #4c566a" fill="none"></path>-->
|
||||||
|
<rect stroke="#4c566a" fill="none" id="r1" width="16" height="16" x="-8" y="-8"/>
|
||||||
|
<use id="r2" href="#r1" transform="rotate(-20 0 0) scale(.78)"/>
|
||||||
|
<use id="r3" href="#r2" transform="rotate(-20 0 0) scale(.78)"/>
|
||||||
|
<use id="r4" href="#r3" transform="rotate(-20 0 0) scale(.78)"/>
|
||||||
|
<use id="r5" href="#r4" transform="rotate(-20 0 0) scale(.78)"/>
|
||||||
|
<use id="r6" href="#r5" transform="rotate(-20 0 0) scale(.78)"/>
|
||||||
|
<use id="r7" href="#r6" transform="rotate(-20 0 0) scale(.78)"/>
|
||||||
|
</svg>
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/ff/XP/SHA256E-s570674--624061c08eeaa784c26863a46887eb331b3d9b75730ff54e79022d201b0aba5b.webp/SHA256E-s570674--624061c08eeaa784c26863a46887eb331b3d9b75730ff54e79022d201b0aba5b.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/v4/ZX/SHA256E-s753820--2d0d077162df828137767a288b56084a7c0f2740ae0f3955f174cff43f939345.webp/SHA256E-s753820--2d0d077162df828137767a288b56084a7c0f2740ae0f3955f174cff43f939345.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/xQ/FG/SHA256E-s32056--3415fc7043b5a588c0398eee92d8649c45589275dbe4122d65a0871d3c7b243a.webp/SHA256E-s32056--3415fc7043b5a588c0398eee92d8649c45589275dbe4122d65a0871d3c7b243a.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/ww/Fk/SHA256E-s68258--c833a29d544b2945da18f6dff7892357597a7109b191369b7a7d74a8b9cceb68.webp/SHA256E-s68258--c833a29d544b2945da18f6dff7892357597a7109b191369b7a7d74a8b9cceb68.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/W8/0F/SHA256E-s24484--933b0a89e41f3fc71b7fc9901fad6d8115696a9da2f086b9d92be89dd5e94e34.webp/SHA256E-s24484--933b0a89e41f3fc71b7fc9901fad6d8115696a9da2f086b9d92be89dd5e94e34.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/f0/WJ/SHA256E-s1103278--a9e8837486e7f8a48b3032fa4c8a395532b974a5b194e7fe3de0bf8fb88a489b.webp/SHA256E-s1103278--a9e8837486e7f8a48b3032fa4c8a395532b974a5b194e7fe3de0bf8fb88a489b.webp
|
|
@ -0,0 +1,32 @@
|
||||||
|
---
|
||||||
|
title: troid
|
||||||
|
subtitle: Experiments with homemade 3d software rendering engines
|
||||||
|
year: "2014-2020"
|
||||||
|
labels: {}
|
||||||
|
---
|
||||||
|
|
||||||
|
I have a keen interest in building buggy software rendering engines.
|
||||||
|
A lot of prototypes were made but nothing came to fruition. Still, it makes for
|
||||||
|
pretty gifs.
|
||||||
|
|
||||||
|
.. raw:: html
|
||||||
|
|
||||||
|
<p id="stack" style="text-align:center; margin-top: 2em;">
|
||||||
|
<img src="moonlight.webp">
|
||||||
|
<img src="landscape.webp">
|
||||||
|
<img src="bunny.webp">
|
||||||
|
<img src="glitchy1.webp">
|
||||||
|
<img src="glitchy2.webp">
|
||||||
|
<img src="donnut.webp">
|
||||||
|
<img src="stretchy.webp">
|
||||||
|
<img src="table.webp">
|
||||||
|
<style>
|
||||||
|
#stack {
|
||||||
|
}
|
||||||
|
#stack img {
|
||||||
|
max-width: 100%;
|
||||||
|
vertical-align: middle;
|
||||||
|
margin: .5em 0;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</p>
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/Gq/mw/SHA256E-s563554--59cd2fb16e762687a28c0df2fb4dae281b82c4d9fac1355cb2d4bc9a22267c55.webp/SHA256E-s563554--59cd2fb16e762687a28c0df2fb4dae281b82c4d9fac1355cb2d4bc9a22267c55.webp
|
|
@ -0,0 +1,6 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<svg viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" version="1.1" height="20px" width="20px">
|
||||||
|
<!-- <path d="M 18,2 A 5,5 0 0 0 13,7 L 13, 13 A 5, 5 0 0 0 18,17 M 13,7 A 5,5 0 0 0 8,2 L 7,2 A 5,5 0 0 0 2,7 L 2,12 A 5, 5 0 0 0 7,17 L 10,17" style="stroke-width: 1; stroke-linecap: round; stroke-linejoin: bevel; stroke: #4c566a" fill="none"></path>-->
|
||||||
|
<path d="M 2,18 L 10,8 L 18,18 Z M 2,12 L 10,2 L 18,12 Z
|
||||||
|
M 2,18 L 2,12 M 10,8 L 10,2 M 18,18 L 18, 12 " style="stroke-width: 1; stroke-linecap: round; stroke-linejoin: bevel; stroke: #4c566a" fill="none"></path>
|
||||||
|
</svg>
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/6w/Wg/SHA256E-s43917--fb438c4fe0c2cf61162b3a53a7af152f73291dbaa922e34cbe8cdc241006298b.png/SHA256E-s43917--fb438c4fe0c2cf61162b3a53a7af152f73291dbaa922e34cbe8cdc241006298b.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/Q6/0V/SHA256E-s1065974--cdc36ff8a2fa55c2b855e7bcab3197847b626bd8376b07cfa78a6e11066e47d6.webp/SHA256E-s1065974--cdc36ff8a2fa55c2b855e7bcab3197847b626bd8376b07cfa78a6e11066e47d6.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/5f/W7/SHA256E-s24652--504adab81bfa9a41fa9675a27acb531de3f7fd3a1f52d52300679d60c8480fea.webp/SHA256E-s24652--504adab81bfa9a41fa9675a27acb531de3f7fd3a1f52d52300679d60c8480fea.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/5q/PG/SHA256E-s71466--fa933822cd53d7504415bf3b7b8423e948050a65d0ad6613a1a92f11bd5331b8.webp/SHA256E-s71466--fa933822cd53d7504415bf3b7b8423e948050a65d0ad6613a1a92f11bd5331b8.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/j1/93/SHA256E-s20554--0060e1e0f05c953435edfc0b8cf532ad5b88817211b6891ccdaea068a98ae7e7.webp/SHA256E-s20554--0060e1e0f05c953435edfc0b8cf532ad5b88817211b6891ccdaea068a98ae7e7.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/8W/vW/SHA256E-s212158--3852e4dbb22e545ae6083d51e91faae8e9b65180d9881ca6cd6154351e2f6566.webp/SHA256E-s212158--3852e4dbb22e545ae6083d51e91faae8e9b65180d9881ca6cd6154351e2f6566.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/1x/XX/SHA256E-s74196--489b0f5c054886b15db4b4e49fab8c9c42d08acb58596de539a6670c9d30deed.webp/SHA256E-s74196--489b0f5c054886b15db4b4e49fab8c9c42d08acb58596de539a6670c9d30deed.webp
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/gX/5q/SHA256E-s10462--06c81e857783d02e5886f369ff3db602cf6bdd573fd6fe2956992e87668a1973.webp/SHA256E-s10462--06c81e857783d02e5886f369ff3db602cf6bdd573fd6fe2956992e87668a1973.webp
|
|
@ -0,0 +1,14 @@
|
||||||
|
This site is under construction, please be kind.
|
||||||
|
|
||||||
|
-----
|
||||||
|
|
||||||
|
All content on this website is licensed under `CC BY-NC 2.0`_ unless stated
|
||||||
|
otherwise. In other words, you are free to copy, redistribute and edit this
|
||||||
|
content, provided you give appropriate credit, indicate where changes were made,
and do not use it for commercial purposes.
|
||||||
|
|
||||||
|
This website is self-hosted on a OneProvider
|
||||||
|
dedicated server in Paris. The domain name `acatalepsie.fr <https://acatalepsie.fr>`_ has
|
||||||
|
been registered at `gandi.net <https://gandi.net>`_.
|
||||||
|
|
||||||
|
.. _CC BY-NC 2.0: https://creativecommons.org/licenses/by-nc/2.0/
|
|
@ -0,0 +1 @@
|
||||||
|
Some visual work I've been doing through the years. Some of it is bad, some not as much.
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/gZ/XG/SHA256E-s184840--08fe7d08c0d946d119eef787f31d0f28224fdfa001975450e83b66d7691072da.jpg/SHA256E-s184840--08fe7d08c0d946d119eef787f31d0f28224fdfa001975450e83b66d7691072da.jpg
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/k3/30/SHA256E-s631729--32753a717b3349dfdce80e732b32e53de3eb4361e8f77c76258b663c9e8cfa32.png/SHA256E-s631729--32753a717b3349dfdce80e732b32e53de3eb4361e8f77c76258b663c9e8cfa32.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/4p/V6/SHA256E-s918970--e7a695c389a762320704a52f5390c37a162dae81ff2df614ce38f40df03be73d.png/SHA256E-s918970--e7a695c389a762320704a52f5390c37a162dae81ff2df614ce38f40df03be73d.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/J5/jZ/SHA256E-s1346919--a6f9e1df5e2593fa1a868244769f16522defbf4bd2c38e0c9014e379d93fdd68.png/SHA256E-s1346919--a6f9e1df5e2593fa1a868244769f16522defbf4bd2c38e0c9014e379d93fdd68.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/wj/7m/SHA256E-s299494--9826507fe82e22161368e3b8008aab5df58d6f4fdb8b29c4f47279faa3a35f74.png/SHA256E-s299494--9826507fe82e22161368e3b8008aab5df58d6f4fdb8b29c4f47279faa3a35f74.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/9J/X7/SHA256E-s670715--d1841af7719367ad43becc186af1d3c520b775bb2bf54da2cb92c135911dbbfb.png/SHA256E-s670715--d1841af7719367ad43becc186af1d3c520b775bb2bf54da2cb92c135911dbbfb.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/wp/jK/SHA256E-s321424--297046f12e2941743a19183faaaa8ebd55428367bc0ee2f457c467495481423c.png/SHA256E-s321424--297046f12e2941743a19183faaaa8ebd55428367bc0ee2f457c467495481423c.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/m1/gJ/SHA256E-s1364718--8fdba85d9550064e2acab434042fcc0aeb376563516a8bf9ad3e2244db00021c.png/SHA256E-s1364718--8fdba85d9550064e2acab434042fcc0aeb376563516a8bf9ad3e2244db00021c.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/gF/87/SHA256E-s458925--e06294cd0c55b73f4ff119ac3299d4f96139ede37ae2fb18110a126b48eede91.jpg/SHA256E-s458925--e06294cd0c55b73f4ff119ac3299d4f96139ede37ae2fb18110a126b48eede91.jpg
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/Q6/MF/SHA256E-s1949616--8f6aee3703fcec5d6e16724c39ad938d26e96b477855b9e05de180bad5bf6e51.png/SHA256E-s1949616--8f6aee3703fcec5d6e16724c39ad938d26e96b477855b9e05de180bad5bf6e51.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/86/p5/SHA256E-s8693437--b5cde85117ffee4fc0ef1f9ae4a9887c12dced028baca4ce76939efef5568b84.png/SHA256E-s8693437--b5cde85117ffee4fc0ef1f9ae4a9887c12dced028baca4ce76939efef5568b84.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/27/kP/SHA256E-s600318--d1e1ae2cef2879eae698498f3c4e3b4907a9f39b227b0c3816fe020e34df066c.png/SHA256E-s600318--d1e1ae2cef2879eae698498f3c4e3b4907a9f39b227b0c3816fe020e34df066c.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/FF/Fp/SHA256E-s2521385--1ad730393c6d1c8159c966019893fbfacfef84b64eef47a26b4d0869e0f8fd58.png/SHA256E-s2521385--1ad730393c6d1c8159c966019893fbfacfef84b64eef47a26b4d0869e0f8fd58.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/42/v4/SHA256E-s1446118--c14985cde38d539fe967254266bcacd35feb89fb9c2817e969ce916ae8967989.png/SHA256E-s1446118--c14985cde38d539fe967254266bcacd35feb89fb9c2817e969ce916ae8967989.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/1p/9X/SHA256E-s1411058--46562b966839842feae88561bdd7cd0e6106138ac17b79de7aa0228846d32aad.png/SHA256E-s1411058--46562b966839842feae88561bdd7cd0e6106138ac17b79de7aa0228846d32aad.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/91/mZ/SHA256E-s1841625--21ce2fad4fa27a77c23eeb8dd2d84ab9d3a5c4b130ef1d4687b6a2c0122d9f24.png/SHA256E-s1841625--21ce2fad4fa27a77c23eeb8dd2d84ab9d3a5c4b130ef1d4687b6a2c0122d9f24.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/j1/04/SHA256E-s202502--d067dc4336ca30160227a30c48722e6df50626c5f777f56b6b2a5e8767c26848.png/SHA256E-s202502--d067dc4336ca30160227a30c48722e6df50626c5f777f56b6b2a5e8767c26848.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/07/26/SHA256E-s919785--393de0aeef6648c5dd5faba83ec92d182d7cc827e5d75599d155c99a826e5c26.png/SHA256E-s919785--393de0aeef6648c5dd5faba83ec92d182d7cc827e5d75599d155c99a826e5c26.png
|
|
@ -0,0 +1 @@
|
||||||
|
../../../.git/annex/objects/xx/0f/SHA256E-s495379--3b920599ddf4ed84c19616f1bfd2cdc76a7a876785ebb2432b21e20085eaa2ea.png/SHA256E-s495379--3b920599ddf4ed84c19616f1bfd2cdc76a7a876785ebb2432b21e20085eaa2ea.png
|
|
@ -0,0 +1,35 @@
|
||||||
|
cabal-version: >=1.10
|
||||||
|
name: site
|
||||||
|
version: 0.1.0.0
|
||||||
|
author: flupe
|
||||||
|
maintainer: lucas@escot.me
|
||||||
|
build-type: Simple
|
||||||
|
optimization: 2
|
||||||
|
|
||||||
|
executable site
|
||||||
|
main-is: Main.hs
|
||||||
|
hs-source-dirs: src
|
||||||
|
other-modules: Templates
|
||||||
|
, Types
|
||||||
|
, Page
|
||||||
|
build-depends: base >=4.12 && <4.13
|
||||||
|
, filepath
|
||||||
|
, achille
|
||||||
|
, blaze-html
|
||||||
|
, blaze-markup
|
||||||
|
, pandoc
|
||||||
|
, pandoc-types
|
||||||
|
, text
|
||||||
|
, bytestring
|
||||||
|
, filepath
|
||||||
|
, frontmatter
|
||||||
|
, aeson
|
||||||
|
, yaml
|
||||||
|
, binary
|
||||||
|
, containers
|
||||||
|
, dates
|
||||||
|
|
||||||
|
ghc-options: -threaded
|
||||||
|
-Wunused-imports
|
||||||
|
-j8
|
||||||
|
default-language: Haskell2010
|
|
@ -0,0 +1,83 @@
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE BlockArguments #-}
|
||||||
|
{-# LANGUAGE TupleSections #-}
|
||||||
|
|
||||||
|
import Data.Functor ((<&>))
|
||||||
|
import Data.Maybe (fromMaybe, mapMaybe)
|
||||||
|
import System.FilePath ((-<.>))
|
||||||
|
import Text.Pandoc.Options
|
||||||
|
|
||||||
|
import Achille
|
||||||
|
import Achille.Recipe.Pandoc
|
||||||
|
|
||||||
|
import Page
|
||||||
|
import Templates
|
||||||
|
|
||||||
|
config :: Config
|
||||||
|
config = def
|
||||||
|
{ deployCmd = Just "rsync -avzzr _site/ --chmod=755 flupe@duckduck.me:/var/www/acatalepsie"
|
||||||
|
}
|
||||||
|
|
||||||
|
main :: IO ()
|
||||||
|
main = achilleWith config do
|
||||||
|
match_ "assets/*" copyFile
|
||||||
|
match_ "./quid.rst" $ compilePandoc <&> outer >>= saveFileAs (-<.> "html")
|
||||||
|
|
||||||
|
-----------
|
||||||
|
-- VISUAL
|
||||||
|
|
||||||
|
pictures <- match "visual/*/*" do
|
||||||
|
copyFile
|
||||||
|
runCommandWith (-<.> "thumb.png")
|
||||||
|
(\a b -> "convert -resize 740x " <> a <> " " <> b)
|
||||||
|
<&> timestamped
|
||||||
|
|
||||||
|
watch pictures $ match_ "./visual.rst" do
|
||||||
|
txt <- compilePandoc
|
||||||
|
write "visual.html" $ renderVisual txt (recentFirst pictures)
|
||||||
|
|
||||||
|
-------------
|
||||||
|
-- PROJECTS
|
||||||
|
|
||||||
|
projects <- matchDir "projects/*/" do
|
||||||
|
task $ match_ "*" copyFile
|
||||||
|
matchFile "index.*" do
|
||||||
|
(meta, doc) <- readPandocMetadataWith ropts
|
||||||
|
renderPandoc doc <&> renderProject meta
|
||||||
|
>>= saveFileAs (-<.> "html")
|
||||||
|
>> (meta,) <$> getCurrentDir
|
||||||
|
|
||||||
|
watch projects $ match_ "./projects.rst" do
|
||||||
|
debug "rendering project index"
|
||||||
|
txt <- compilePandocWith def wopts
|
||||||
|
write "projects.html" $ renderProjects txt projects
|
||||||
|
|
||||||
|
------------------
|
||||||
|
-- POSTS & INDEX
|
||||||
|
|
||||||
|
posts <- match "posts/*" do
|
||||||
|
src <- copyFile
|
||||||
|
(Page title d, pdc) <- readPandocMetadata
|
||||||
|
|
||||||
|
renderPandocWith wopts pdc
|
||||||
|
<&> renderPost title src
|
||||||
|
>>= saveFileAs (-<.> "html")
|
||||||
|
<&> (d,) . (title,)
|
||||||
|
<&> timestampedWith (timestamp . snd . snd)
|
||||||
|
|
||||||
|
let visible = mapMaybe
|
||||||
|
(\(Timestamped d (dr, p)) ->
|
||||||
|
if fromMaybe False dr then Nothing
|
||||||
|
else Just $ Timestamped d p) posts
|
||||||
|
|
||||||
|
watch (take 10 visible) $ match_ "./index.rst" do
|
||||||
|
debug "rendering index"
|
||||||
|
compilePandoc
|
||||||
|
<&> renderIndex visible
|
||||||
|
>>= saveFileAs (-<.> "html")
|
||||||
|
|
||||||
|
ropts :: ReaderOptions
|
||||||
|
ropts = def { readerExtensions = enableExtension Ext_smart githubMarkdownExtensions }
|
||||||
|
|
||||||
|
wopts :: WriterOptions
|
||||||
|
wopts = def { writerHTMLMathMethod = KaTeX "" }
|
|
@ -0,0 +1,13 @@
|
||||||
|
{-# LANGUAGE DeriveGeneric #-}
|
||||||
|
|
||||||
|
module Page where
|
||||||
|
|
||||||
|
import GHC.Generics
|
||||||
|
import Data.Aeson.Types (FromJSON)
|
||||||
|
|
||||||
|
data Page = Page
  { title :: String
  , draft :: Maybe Bool
  } deriving (Generic, Eq, Show)
|
||||||
|
|
||||||
|
instance FromJSON Page
|
|
@ -0,0 +1,114 @@
|
||||||
|
{-# LANGUAGE OverloadedStrings #-}
|
||||||
|
{-# LANGUAGE BlockArguments #-}
|
||||||
|
|
||||||
|
module Templates where
|
||||||
|
|
||||||
|
import Data.String (fromString)
|
||||||
|
import Control.Monad (forM_)
|
||||||
|
|
||||||
|
import System.FilePath
|
||||||
|
import Text.Blaze.Internal as I
|
||||||
|
import Text.Blaze.Html5 as H
|
||||||
|
import Text.Blaze.Html5.Attributes as A
|
||||||
|
import Data.Dates.Types (DateTime(..), months, capitalize)
|
||||||
|
|
||||||
|
import Achille
|
||||||
|
import Types (Project)
|
||||||
|
import qualified Types
|
||||||
|
import qualified Data.Map.Strict as Map
|
||||||
|
|
||||||
|
showDate :: DateTime -> String
|
||||||
|
showDate (DateTime y m d _ _ _) = month <> " " <> show d <> ", " <> show y
|
||||||
|
where month = capitalize (months !! (m - 1))
|
||||||
|
|
||||||
|
loading :: AttributeValue -> Attribute
|
||||||
|
loading = I.customAttribute "loading"
|
||||||
|
|
||||||
|
toLink :: FilePath -> Html -> Html
|
||||||
|
toLink url = H.a ! A.href (fromString $ "/" <> url)
|
||||||
|
|
||||||
|
|
||||||
|
renderIndex :: [Timestamped (String, FilePath)] -> Html -> Html
|
||||||
|
renderIndex posts content =
|
||||||
|
outer do
|
||||||
|
content
|
||||||
|
H.h2 "Latest notes"
|
||||||
|
H.ul $ forM_ posts \(Timestamped d (title, src)) ->
|
||||||
|
H.li do
|
||||||
|
H.span $ fromString $ showDate d <> ": "
|
||||||
|
toLink src (fromString title)
|
||||||
|
|
||||||
|
renderPost :: String -> FilePath -> Html -> Html
|
||||||
|
renderPost title source content =
|
||||||
|
outer do
|
||||||
|
H.h1 $ fromString title
|
||||||
|
toLink source "View source"
|
||||||
|
content
|
||||||
|
|
||||||
|
renderVisual :: Html -> [Timestamped FilePath] -> Html
|
||||||
|
renderVisual txt imgs =
|
||||||
|
outer do
|
||||||
|
txt
|
||||||
|
H.hr
|
||||||
|
H.section $ forM_ imgs \ (Timestamped _ p) ->
|
||||||
|
H.figure $ H.img ! A.src (fromString p)
|
||||||
|
! loading "lazy"
|
||||||
|
|
||||||
|
renderProject :: Project -> Html -> Html
|
||||||
|
renderProject project content =
|
||||||
|
outer do
|
||||||
|
H.header ! A.class_ "project" $ do
|
||||||
|
H.div $ H.img ! A.src "logo.svg"
|
||||||
|
H.div do
|
||||||
|
H.h1 $ fromString $ Types.title project
|
||||||
|
H.p $ fromString $ Types.subtitle project
|
||||||
|
H.ul $ forM_ (Map.toList $ Types.labels project) \(k, v) -> H.li do
|
||||||
|
fromString k <> ": "
|
||||||
|
if k == "repo" then
|
||||||
|
H.a ! A.href (fromString $ "https://github.com/" <> v)
|
||||||
|
$ fromString v
|
||||||
|
else fromString v
|
||||||
|
content
|
||||||
|
|
||||||
|
renderProjects :: Html -> [(Project, FilePath)] -> Html
|
||||||
|
renderProjects txt paths =
|
||||||
|
outer do
|
||||||
|
txt
|
||||||
|
H.ul ! A.class_ "projects" $ do
|
||||||
|
forM_ paths \(project, link) -> H.li $ H.a ! A.href (fromString link) $ do
|
||||||
|
H.div $ H.img ! A.src (fromString $ link </> "logo.svg")
|
||||||
|
H.div do
|
||||||
|
H.h2 $ fromString (Types.title project)
|
||||||
|
H.p $ fromString (Types.subtitle project)
|
||||||
|
-- H.ul $ forM_ (Map.toList $ Types.labels project) \(k, v) ->
|
||||||
|
-- H.li $ (fromString k <> ": " <> fromString v)
|
||||||
|
|
||||||
|
logo :: Html
|
||||||
|
logo = preEscapedString "<svg xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\" height=\"19px\" width=\"29px\"><path d=\"M 2,2 A 5,5 0 0 1 7,7 L 7, 12 A 5, 5 0 0 1 2,17 M 7,7 A 5,5 0 0 1 12,2 L 22,2 A 5,5 0 0 1 27,7 L 27,12 A 5, 5 0 0 1 22,17 L 12,17\" style=\"stroke-width: 2; stroke-linecap: butt; stroke-linejoin: bevel; stroke: #fff\" fill=\"none\"/></svg>"
|
||||||
|
|
||||||
|
outer :: Html -> Html
|
||||||
|
outer content = H.docTypeHtml do
|
||||||
|
H.head do
|
||||||
|
H.meta ! A.name "viewport"
|
||||||
|
! A.content "width=device-width, initial-scale=1.0, user-scalable=yes"
|
||||||
|
H.meta ! A.name "theme-color" ! A.content "#000000"
|
||||||
|
H.meta ! A.name "robots" ! A.content "index, follow"
|
||||||
|
H.meta ! charset "utf-8"
|
||||||
|
H.link ! A.rel "stylesheet" ! A.href "/assets/theme.css"
|
||||||
|
H.title "sbbls"
|
||||||
|
|
||||||
|
H.body do
|
||||||
|
H.header ! A.id "hd" $ H.section do
|
||||||
|
H.a ! A.href "/" $ logo
|
||||||
|
H.section $ H.nav do
|
||||||
|
H.a ! A.href "/projects.html" $ "Projects"
|
||||||
|
H.a ! A.href "/visual.html" $ "Visual"
|
||||||
|
H.a ! A.href "/quid.html" $ "Quid"
|
||||||
|
|
||||||
|
H.main content
|
||||||
|
|
||||||
|
H.footer ! A.id "ft" $ do
|
||||||
|
"flupe 2020 · "
|
||||||
|
H.a ! A.href "https://creativecommons.org/licenses/by-nc/2.0/" $ "CC BY-NC 2.0"
|
||||||
|
" · "
|
||||||
|
H.a ! A.href "https://instagram.com/ba.bou.m/" $ "instagram"
|
|
@ -0,0 +1,22 @@
|
||||||
|
{-# LANGUAGE DeriveGeneric #-}
|
||||||
|
|
||||||
|
module Types where
|
||||||
|
|
||||||
|
import GHC.Generics
|
||||||
|
import Data.Aeson.Types (FromJSON)
|
||||||
|
import Data.Binary (Binary, put, get)
|
||||||
|
import qualified Data.Map.Strict as Map
|
||||||
|
|
||||||
|
data Project = Project
  { title    :: String
  , subtitle :: String
  , year     :: String
  , labels   :: Map.Map String String
  , gallery  :: Maybe Bool
  } deriving (Generic, Eq, Show)
|
||||||
|
|
||||||
|
instance FromJSON Project
|
||||||
|
instance Binary Project where
  put (Project t s y l g) = put t >> put s >> put y >> put l >> put g
  get = Project <$> get <*> get <*> get <*> get <*> get
|
||||||
|
|