web/lukegbcom: init next.js version

This commit is contained in:
Luke Granger-Brown 2022-04-04 00:32:57 +01:00
parent f8f5d48eec
commit 762a5a7271
51 changed files with 13979 additions and 0 deletions

View file

@ -22,3 +22,5 @@ syntax: glob
*~ *~
db.sqlite3 db.sqlite3
node_modules/ node_modules/
.next/
out/

View file

@ -21,4 +21,6 @@
--replace "{{DEPOT_VERSION}}" "$depotVersion" --replace "{{DEPOT_VERSION}}" "$depotVersion"
done done
''; '';
lukegbcom = import ./lukegbcom args;
} }

4
web/lukegbcom/.babelrc Normal file
View file

@ -0,0 +1,4 @@
{
"presets": ["next/babel"],
"plugins": ["react-optimized-image/plugin"]
}

View file

@ -0,0 +1,3 @@
[**.{js,scss,css}]
indent_style = space
indent_size = 2

View file

@ -0,0 +1,3 @@
{
"extends": "next/core-web-vitals"
}

32
web/lukegbcom/.gitignore vendored Normal file
View file

@ -0,0 +1,32 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel

33
web/lukegbcom/default.nix Normal file
View file

@ -0,0 +1,33 @@
# Builds the statically-exported lukegb.com Next.js site as a Nix derivation.
{ pkgs, ... }:
let
  # NOTE(review): nodejs-12_x is long past upstream EOL — confirm whether the
  # pinned dependency set works on a newer LTS.
  nodejs = pkgs.nodejs-12_x;
  # node2nix-generated dependency composition (see node-overrides.nix).
  composition = pkgs.callPackage ./node-overrides.nix { inherit nodejs; };
  inherit (composition.shell) nodeDependencies;
in
pkgs.stdenv.mkDerivation {
  name = "lukegbcom-export";
  # Source tree minus build artifacts and locally-installed dependencies.
  src = pkgs.nix-gitignore.gitignoreSourcePure [
    ".next"
    "out"
    "build"
    ".pnp"
    "node_modules"
    ".pnp.js"
  ] ./.;
  buildInputs = [ nodejs ];
  # Link the prebuilt node_modules closure from the Nix store (instead of
  # `npm install`), then run the Next.js build and static export.
  buildPhase = ''
    ln -s ${nodeDependencies}/lib/node_modules ./node_modules
    export PATH="${nodeDependencies}/bin:$PATH"
    echo Starting build
    next build
    echo Starting static export
    next export
  '';
  # `next export` writes the static site to ./out; that becomes $out.
  installPhase = ''
    echo Done - moving to output
    mv out $out
  '';
}

View file

@ -0,0 +1,26 @@
import { useRouter } from 'next/router'
import Link from 'next/link'
import React from 'react'
// True when the current path is the link target itself or any route nested
// beneath it (e.g. href "/posts" is active on "/posts/2022/foo").
function isActive(linkHref, currentPath) {
  return linkHref === currentPath || currentPath.startsWith(`${linkHref}/`)
}
function ActiveLink(props) {
const router = useRouter()
const child = props.children
const childProps = {
className: child.props.className + ' ' + (isActive(props.href, router.asPath) ? props.activeClassName : ''),
}
return (
<Link {...props}>
{React.cloneElement(child, childProps)}
</Link>
)
}
export default ActiveLink

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24"><path d="M12 0C5.374 0 0 5.373 0 12c0 5.302 3.438 9.8 8.207 11.387.6.11.793-.26.793-.577v-2.234c-3.338.726-4.033-1.416-4.033-1.416-.546-1.387-1.333-1.756-1.333-1.756-1.09-.745.083-.73.083-.73 1.205.085 1.84 1.238 1.84 1.238 1.07 1.834 2.806 1.304 3.49.997.108-.776.42-1.306.763-1.605-2.665-.305-5.467-1.334-5.467-5.93 0-1.312.47-2.382 1.236-3.222-.125-.303-.536-1.524.116-3.176 0 0 1.008-.322 3.3 1.23A11.51 11.51 0 0 1 12 5.803c1.02.005 2.047.138 3.006.404 2.29-1.552 3.297-1.23 3.297-1.23.653 1.653.242 2.874.118 3.176.77.84 1.236 1.91 1.236 3.22 0 4.61-2.807 5.625-5.48 5.922.43.372.824 1.102.824 2.222v3.293c0 .32.192.694.8.576C20.567 21.796 24 17.3 24 12c0-6.627-5.373-12-12-12z"/></svg>

After

Width:  |  Height:  |  Size: 776 B

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24"><path d="M24 4.557a9.83 9.83 0 0 1-2.828.775 4.932 4.932 0 0 0 2.165-2.724 9.864 9.864 0 0 1-3.127 1.195 4.916 4.916 0 0 0-3.594-1.555c-3.18 0-5.515 2.966-4.797 6.045A13.978 13.978 0 0 1 1.67 3.15a4.93 4.93 0 0 0 1.524 6.573 4.903 4.903 0 0 1-2.23-.616c-.053 2.28 1.582 4.415 3.95 4.89a4.935 4.935 0 0 1-2.224.084 4.928 4.928 0 0 0 4.6 3.42A9.9 9.9 0 0 1 0 19.54a13.94 13.94 0 0 0 7.548 2.212c9.142 0 14.307-7.72 13.995-14.646A10.025 10.025 0 0 0 24 4.556z"/></svg>

After

Width:  |  Height:  |  Size: 549 B

View file

@ -0,0 +1,33 @@
import styles from './index.module.scss'
import ActiveLink from '../ActiveLink'
import { Svg } from 'react-optimized-image'
// Site-wide header: primary navigation (Home/Posts/Toolbox, with the current
// section highlighted via ActiveLink) plus Twitter and GitHub icon links.
export default function HeaderNav() {
  return (
    <header className={styles.header}>
      <nav className={styles.nav}>
        <div className={styles.navPart1}>
          <ActiveLink href="/" activeClassName={styles.active}><a className={styles.link}>Home</a></ActiveLink>
          <ActiveLink href="/posts" activeClassName={styles.active}><a className={styles.link}>Posts</a></ActiveLink>
          <ActiveLink href="/tools" activeClassName={styles.active}><a className={styles.link}>Toolbox</a></ActiveLink>
        </div>
        <div className={styles.navPart2}>
          {/* External profile links; aria-label compensates for icon-only content. */}
          <a
            href="https://twitter.com/lukegb"
            className={styles.link}
            aria-label="Twitter"
          >
            <Svg src={require('./assets/iconmonstr-twitter-1.svg')} viewBox="0 0 24 24" />
          </a>
          <a
            href="https://github.com/lukegb"
            className={styles.link}
            aria-label="GitHub"
          >
            <Svg src={require('./assets/iconmonstr-github-1.svg')} viewBox="0 0 24 24" />
          </a>
        </div>
      </nav>
    </header>
  )
}

View file

@ -0,0 +1,55 @@
/* Site header overlaid on the hero image (absolute, above the hero's z-order). */
.header {
  --nav-item-padding: 0.5rem;
  position: absolute;
  z-index: 20;
  /* stretch */
  left: 0;
  right: 0;
  color: var(--colorNeutralLight);
}

/* Horizontal nav bar, centred, capped at the site width plus link padding. */
.nav {
  display: flex;
  flex-direction: row;
  justify-content: space-between;
  max-width: calc(var(--maxWidth) + (var(--nav-item-padding) * 2));
  margin: 0 auto;
  padding: 0;
  line-height: 3rem;
}

/* Left (page links) and right (social icons) halves of the nav. */
.navPart1,
.navPart2 {
  display: flex;
  flex-direction: row;
}

/* Nav links: dimmed by default; full opacity and a bottom border when
   hovered, focused, or marked .active by ActiveLink. */
.link {
  display: flex;
  align-items: center;
  padding: 0 var(--nav-item-padding);
  color: var(--colorNeutralLight);
  text-decoration: none !important;
  opacity: 0.6;
  transition: 0.4s all;
  border-bottom: 1px solid transparent;

  &:hover,
  &:focus,
  &.active {
    color: inherit;
    opacity: 1;
    border-bottom-color: var(--colorNeutralLight);
  }
}

/* Social SVG icons follow the link colour. */
.link svg {
  fill: currentColor;
  opacity: 0.5;
  /*margin-right: 0.5rem;*/
  width: 14px;
  height: 14px;
  vertical-align: middle;
}

View file

@ -0,0 +1,45 @@
import styles from './index.module.scss'
// Hook point for rewriting hero-image URLs (e.g. to a CDN).
// Currently the identity function.
const processHeroURL = (url) => url;
// Maps a gradient name to its CSS gradient value. A bare `true` (i.e.
// `withGradient` used as a plain flag) means the default 'bottom-black'.
// Logs and returns undefined for unrecognized names.
function gradient(gradientName) {
  switch (gradientName) {
    // Previously written as `case true, 'bottom-black':` — the comma
    // operator evaluates to 'bottom-black' alone, so `true` never matched.
    // Two case labels restore the intended fall-through.
    case true:
    case 'bottom-black':
      return 'linear-gradient(to bottom, rgba(255, 255, 255, 0), rgba(20, 20, 20, 1))'
    case 'top-black':
      return 'linear-gradient(180deg,#111,hsla(228,2%,56%,.2))'
    case 'top-blue':
      return 'linear-gradient(180deg,#107491,rgba(0,122,204,.2))'
  }
  console.error(`invalid withGradient in HeroImage: ${gradientName}`)
}
// Builds the hero's inline background style. With an image: optional
// gradient layered over the (processed) image, dark fallback colour.
// Without one: the branded green gradient.
function computeBackground({ withGradient, image }) {
  if (!image) {
    return {
      backgroundColor: '#46be77',
      background: 'linear-gradient(180deg,#107491,rgba(0,122,204,.2)), radial-gradient(ellipse at 50% 50%,#6ccb94 15%,#46be77 70%)',
    }
  }
  const gradientLayer = withGradient ? `${gradient(withGradient)}, ` : ''
  return {
    background: `${gradientLayer} #111 url(${processHeroURL(image)}) 50% 50% / cover`,
  }
}
// Hero banner: gradient/image background (see computeBackground) with a
// centred title card and an optional photo-credit overlay.
// Props: image, withGradient, fullHeight, credit ({url, text}), children.
export default function HeroImage(props) {
  return (
    <div className={`${styles.container} ${props.fullHeight ? styles.fullHeight : ''}`}
      style={computeBackground(props)}>
      <div className={styles.heroCard}>
        <h1 className={styles.heroTitle}>
          {props.children}
        </h1>
      </div>
      {/* Credit overlay renders only when a credit object is supplied. */}
      {props.credit && (<div className={styles.heroCredit}>
        <span>Photo:</span> <a href={props.credit.url}>{props.credit.text}</a>
      </div>)}
    </div>
  )
}

View file

@ -0,0 +1,73 @@
.container {
  position: relative;
  margin-bottom: 1em;
}

.heroCard {
  display: flex;
  flex-direction: column;
  /* 10vh with a 5rem fallback for browsers without viewport units. */
  padding: 5rem 0;
  padding: 10vh 0;
  text-align: center;
}

/* Full-viewport variant: the card is vertically centred over the hero. */
.fullHeight {
  &.container {
    height: 100vh;
  }
  .heroCard {
    left: 0;
    position: absolute;
    text-align: center;
    top: 50%;
    transform: translateY(-50%);
    width: 100%;
  }
  .heroTitle {
    font-size: 30pt;
  }
}

/* Clearfix. `content` was missing, so the ::after pseudo-element was never
   generated and the clear had no effect. */
.heroCard:after {
  content: "";
  clear: both;
  display: block;
}

.heroTitle {
  color: white;
  font-size: 20pt;
  line-height: 1;
  text-shadow: 0 0 6px #111, 0 0 2px #111;
  letter-spacing: 1px;
}

/* Photo credit: nearly invisible until hovered. */
.heroCredit {
  position: absolute;
  bottom: 0.2em;
  right: 0.3em;
  opacity: 0.2;
  color: var(--colorNeutralLight);
  transition: opacity 0.5s ease-out;
}

.heroCredit:hover {
  opacity: 1;
}

/* Custom underline for the credit link, faded in on hover. */
.heroCredit a::after {
  content: "";
  position: absolute;
  left: 0;
  display: inline-block;
  height: 1em;
  width: 100%;
  border-bottom: 1px solid var(--colorNeutralLight);
  margin-top: 5px;
  opacity: 0;
  transition: opacity 0.2s ease-out;
}

.heroCredit:hover a::after {
  opacity: 1;
}

.heroCredit a:hover {
  text-decoration: none;
}

View file

@ -0,0 +1,208 @@
import React from "react"
import IPInput from "./IPInput"
import RFCLink from "./RFCLink"
import { Address4, Address6 } from "ip-address"
import { BigInteger } from "jsbn"
import styles from "./CIDRCalculator.module.css"
// Well-known IPv4-IPv6 translation prefix (RFC 6052); to4() output is only
// meaningful for addresses inside it.
const IP4IN6SUBNET = '64:ff9b::/96'
// Special-purpose IPv6 prefixes (CIDR -> labelled RFC link), consulted for
// addresses that ip-address otherwise classifies as global unicast.
const IPv6TYPEEXTS = {
  '::1/128': (<RFCLink rfc={4291}>Loopback</RFCLink>),
  '::/128': (<RFCLink rfc={4291}>Unspecified</RFCLink>),
  '::ffff:0:0/96': (<RFCLink rfc={4291}>IPv4 Mapped</RFCLink>),
  '64:ff9b::/96': (<RFCLink rfc={6052}>IPv4-IPv6 Translation</RFCLink>),
  '100::/64': (<RFCLink rfc={6666}>Discard-Only Address Block</RFCLink>),
  '2001::/32': (<RFCLink rfc={4280}>TEREDO Tunnelling</RFCLink>),
  '2001:1::1/128': (<RFCLink rfc={7723}>Port Control Protocol Anycast</RFCLink>),
  '2001:1::2/128': (<RFCLink rfc={8155}>Traversal Using Relays around NAT Anycast</RFCLink>),
  '2001:2::/48': (<RFCLink rfc={5180}>Benchmarking</RFCLink>),
  '2001:3::/32': (<RFCLink rfc={7450}>AMT</RFCLink>),
  '2001:4:112::/48': (<RFCLink rfc={7535}>AS112-v6</RFCLink>),
  '2001:5::/32': (<RFCLink rfc={7954}>EID Space for LISP</RFCLink>),
  '2001:10::/28': (<RFCLink rfc={4843}>Deprecated (previously ORCHID)</RFCLink>),
  '2001:20::/28': (<RFCLink rfc={7343}>ORCHIDv2</RFCLink>),
  '2002::/16': (<RFCLink rfc={3056}>6to4</RFCLink>),
  '2620:4f:8000::/48': (<RFCLink rfc={7534}>Direct Delegation AS112 Service</RFCLink>),
  'fc00::/7': (<RFCLink rfc={4193}>Unique-Local</RFCLink>),
  'fe80::/10': (<RFCLink rfc={4291}>Linked-Scoped Unicast</RFCLink>),
}
// Special-purpose IPv4 ranges (CIDR -> labelled RFC link).
const IPv4TYPES = {
  '0.0.0.0/8': (<RFCLink rfc={1700}>Broadcast to "this"</RFCLink>),
  '10.0.0.0/8': (<RFCLink rfc={1918}>Private network</RFCLink>),
  '100.64.0.0/10': (<RFCLink rfc={6598}>Carrier-grade NAT private network</RFCLink>),
  '127.0.0.0/8': (<RFCLink rfc={990}>Loopback</RFCLink>),
  '169.254.0.0/16': (<RFCLink rfc={3927}>Link-local</RFCLink>),
  '172.16.0.0/12': (<RFCLink rfc={1918}>Private network</RFCLink>),
  '192.0.0.0/24': (<RFCLink rfc={5736}>IANA IPv4 Special Purpose Address Registry</RFCLink>),
  '192.0.2.0/24': (<RFCLink rfc={5737}>"TEST-NET" for documentation/examples</RFCLink>),
  '192.88.99.0/24': (<RFCLink rfc={3068}>6to4 anycast relays</RFCLink>),
  '192.168.0.0/16': (<RFCLink rfc={1918}>Private network</RFCLink>),
  '198.18.0.0/15': (<RFCLink rfc={2544}>Testing networking equipment</RFCLink>),
  '198.51.100.0/24': (<RFCLink rfc={5737}>"TEST-NET-2" for documentation/examples</RFCLink>),
  '203.0.113.0/24': (<RFCLink rfc={5737}>"TEST-NET-3" for documentation/examples</RFCLink>),
  '224.0.0.0/4': (<RFCLink rfc={1112}>Multicast</RFCLink>),
  '240.0.0.0/4': (<RFCLink rfc={6890}>Future use</RFCLink>),
  '255.255.255.255/32': (<RFCLink rfc={6890}>"Limited broadcast" destination</RFCLink>),
}
// Returns the first own-property value for which predicate(value, key) is
// truthy, or undefined when no entry matches.
const find = (collection, predicate) => {
  const hit = Object.entries(collection).find(([key, value]) => predicate(value, key))
  return hit ? hit[1] : undefined
}
// Classifies an Address6: non-global types come straight from ip-address;
// global-unicast addresses are further matched against the special-purpose
// prefixes in IPv6TYPEEXTS.
const v6Type = (v6) => {
  const baseType = v6.getType()
  if (baseType !== 'Global unicast') return baseType
  const special = find(IPv6TYPEEXTS, (label, subnet) => v6.isInSubnet(new Address6(subnet)))
  return special || baseType
}
// Classifies an Address4 against the IPv4TYPES special-purpose ranges,
// defaulting to global unicast when nothing matches.
const v4Type = (v4) =>
  find(IPv4TYPES, (label, subnet) => v4.isInSubnet(new Address4(subnet))) || 'Global unicast'
// Converts an IPv4 prefix length (0-32) to dotted-quad netmask notation,
// e.g. 24 -> "255.255.255.0".
const v4BitsToMask = (subnetMask) => {
  let mask = 0
  for (let bit = 0; bit < subnetMask; bit++) {
    mask = (mask << 1) | 1
  }
  mask <<= 32 - subnetMask
  const octet = (shift) => ((mask >> shift) & 0xff).toString(10)
  return [octet(24), octet(16), octet(8), octet(0)].join('.')
}
// Selects the entire text content of a DOM element so a single click
// leaves the value ready to copy.
const highlightElement = (el) => {
  const selection = window.getSelection()
  const range = document.createRange()
  range.selectNodeContents(el)
  selection.removeAllRanges()
  selection.addRange(range)
}
// Click handlers: select the clicked element itself, or its next sibling
// (used on <dt> labels to select the matching <dd> value).
const highlight = (ev) => highlightElement(ev.target)
const highlightNext = (ev) => highlightElement(ev.target.nextElementSibling)
// Renders [label, value] pairs as a definition list. Clicking a label (dt)
// selects the adjacent value; clicking a value (dd) selects itself.
const toDL = (pairs) => (
  <dl>
    {pairs.map(pair => (
      ([
        <dt className={styles.cidrHead} onClick={highlightNext} key={`dt:${pair[0]}`}>{pair[0]}</dt>,
        <dd className={styles.cidrText} onClick={highlight} key={`dd:${pair[0]}`}>{pair[1]}</dd>
      ])
    ))}
  </dl>
)
class CIDRCalculator extends React.Component {
constructor(props) {
super(props)
this.state = {
}
this.handleIPChange = this.handleIPChange.bind(this)
this.highlight = this.highlight.bind(this)
}
handleIPChange(value) {
this.setState({ ip: value })
}
highlight(event) {
const range = document.createRange()
range.selectNodeContents(event.target)
const sel = window.getSelection()
sel.removeAllRanges()
sel.addRange(range)
}
renderIP(ip) {
if (!ip) return []
if (ip.to4) return this.renderIPv6(ip)
return this.renderIPv4(ip)
}
renderIPv6(ip) {
const meaningful4in6 = ip.isInSubnet(new Address6(IP4IN6SUBNET))
let info = [
['IPv6 Address', ip.correctForm()],
['Type', v6Type(ip)],
['Prefix Size', ip.subnet],
['Scope', ip.getScope()],
[`4-in-6${meaningful4in6 ? '' : ' (probably meaningless)'}`, ip.to4().correctForm()],
]
if (ip.getBits(88, 104).toString(16) === 'fffe') {
const highBitsPreFix = ip.getBits(64, 88)
const lowBits = ip.getBits(104, 128)
const highBits = highBitsPreFix.xor(new BigInteger((1 << 17).toString(10)))
const mac = highBits.shiftLeft(new BigInteger('24')).or(lowBits)
const mask = new BigInteger('255')
info.push(['Embedded MAC Address', [...Array(6).keys()].map((n) => {
return mac.shiftRight(new BigInteger((8 * 5 - n * 8).toString())).and(mask).toString(16).padStart(2, '0')
}).join(':')])
}
if (ip.subnetMask <= 126) {
const firstAddress = ip.startAddress()
const lastAddress = ip.endAddress()
info = info.concat([
['First Usable Address', firstAddress.correctForm()],
['Last Usable Address', lastAddress.correctForm()],
['Usable Addresses', lastAddress.bigInteger().subtract(firstAddress.bigInteger()).toString()],
])
}
return toDL(info)
}
renderIPv4(ip) {
const netAddress = ip.startAddress()
const firstAddress = Address4.fromBigInteger(netAddress.bigInteger().add(BigInteger.ONE))
const broadcastAddress = ip.endAddress()
const lastAddress = Address4.fromBigInteger(broadcastAddress.bigInteger().subtract(BigInteger.ONE))
let info = [
['IPv4 Address', ip.correctForm()],
['Type', v4Type(ip)],
['Prefix Size', ip.subnet],
['Subnet Mask', v4BitsToMask(ip.subnetMask)],
]
if (ip.subnetMask <= 30)
info = info.concat([
['Network Address', netAddress.correctForm()],
['First Usable Address', firstAddress.correctForm()],
['Last Usable Address', lastAddress.correctForm()],
['Broadcast Address', broadcastAddress.correctForm()],
['Usable Addresses', lastAddress.bigInteger().subtract(firstAddress.bigInteger()).toString()],
])
else if (ip.subnetMask == 31)
info = info.concat([
['First Usable Address (point-to-point)', netAddress.correctForm()],
['Last Usable Address (point-to-point)', firstAddress.correctForm()],
])
return toDL(info)
}
render() {
return (
<div>
<form>
<label>IP Address/Subnet: <IPInput onChange={this.handleIPChange} ip={this.state.ip}></IPInput></label>
{this.renderIP(this.state.ip)}
</form>
</div>
)
}
}
export default CIDRCalculator

View file

@ -0,0 +1,7 @@
/* Definition-list label (dt) in the CIDR results. */
.cidrHead {
  font-weight: bold;
}
/* RFC links inside result values keep a visible underline. */
.cidrText a {
  text-decoration: underline;
}

View file

@ -0,0 +1,72 @@
import React from "react"
import { Address4, Address6 } from "ip-address"
// Matches a trailing dotted-quad netmask (e.g. "/255.255.255.0") so it can
// be rewritten to prefix-length notation before parsing.
const ENDS_WITH_MASK_RE = /\/(((255|254|252|248|240|224|192|128|0)\.){3}(255|254|252|248|240|224|192|128|0))$/
const DEFAULT_IP = '2001:4860:4860::8888/32' // hi, 8.8.8.8
// Text input that parses its value as an IPv4/IPv6 address (with prefix)
// and reports the parsed Address4/Address6 — or null — via props.onChange.
class IPInput extends React.Component {
  constructor(props) {
    super(props)
    // props.ip may be a preparsed Address object (has correctForm) or a raw string.
    const ip = props.ip ? (props.ip.correctForm ? props.ip.correctForm() : props.ip) : DEFAULT_IP
    this.state = { text: ip }
    this.handleChange = this.handleChange.bind(this)
  }
  // Push the initial/default value up to the parent once mounted.
  componentDidMount() {
    this.setNewIP(this.state.text)
  }
  handleChange(event) {
    this.setState({
      text: this.setNewIP(event.target.value),
    })
  }
  // Parses `realIP`, calls props.onChange with the result (null if invalid),
  // and returns the raw text unchanged so the input shows what was typed.
  setNewIP(realIP) {
    let ip = realIP
    const ipv4endmask = ENDS_WITH_MASK_RE.exec(ip)
    let valid = true
    if (ipv4endmask) {
      // Rewrite "a.b.c.d/x.y.z.w" as "a.b.c.d/<prefixLen>", rejecting
      // non-contiguous masks via the bit-check loop below.
      ip = ip.slice(0, ip.length - ipv4endmask[0].length)
      const netmaskv4 = new Address4(ipv4endmask[1])
      const nmbi = netmaskv4.bigInteger()
      // NOTE(review): getLowestSetBit() is -1 for an all-zero mask
      // ("/0.0.0.0"), which makes prefixLen 33 and the loop start at -1 —
      // confirm jsbn's testBit handles a negative index as intended.
      const prefixLen = 32 - nmbi.getLowestSetBit()
      for (let n = 32 - prefixLen; n < 32 && valid; n++) {
        if (!nmbi.testBit(n)) valid = false
      }
      if (valid)
        ip += `/${prefixLen}`
    }
    // Try both parsers; invalid input simply leaves the slot null.
    let ipv4 = null
    try {
      ipv4 = new Address4(ip)
    } catch (e) { }
    let ipv6 = null
    try {
      ipv6 = new Address6(ip)
    } catch (e) { }
    valid = valid && (ipv6 || ipv4)
    // Prefer the IPv6 interpretation when both parses succeed.
    if (valid)
      if (ipv6)
        this.props.onChange(ipv6)
      else
        this.props.onChange(ipv4)
    else
      this.props.onChange(null)
    return realIP
  }
  render() {
    return (
      <input type="text" value={this.state.text} onChange={this.handleChange}></input>
    )
  }
}
export default IPInput

View file

@ -0,0 +1,12 @@
import React from "react"
const RFCLink = ({ rfc, draft, children }) => {
const destination = draft ? `https://www.iana.org/go/draft-${rfc}` : `https://tools.ietf.org/html/rfc${rfc}`;
const text = draft ? `RFC-${rfc}` : `RFC${rfc}`;
return (
<a href={destination}>{children} ({text})</a>
)
}
export default RFCLink

View file

@ -0,0 +1,13 @@
import CIDRCalculator from "./CIDRCalculator"
// Networking section of the toolbox page; currently hosts the CIDR
// calculator with attribution for the ip-address library.
export default function NetworkingTools() {
  return (
    <div>
      <section>
        <h2>CIDR Calculator</h2>
        <CIDRCalculator />
        <small>Thanks to <a href="https://github.com/beaugunderson/ip-address/">ip-address</a> for a bunch of heavy lifting</small>
      </section>
    </div>
  )
}

View file

@ -0,0 +1,29 @@
import Link from 'next/link'
import styles from './index.module.scss'
export default function LatestPosts({ posts }) {
return (
<div className={styles.container}>
<ul className={styles.postsList}>
{posts.map((post) => (
<li className={styles.post} key={post.slug}>
<div>
<Link href={`/posts/${encodeURIComponent(post.slug)}`}>
<a>
<h3 className={styles.postHeader}>{post.title}</h3>
</a>
</Link>
<time className={styles.postTimestamp}>{post.date.toString()}</time>
<div className={styles.postBlurb} dangerouslySetInnerHTML={{ __html: post.excerptHtml }}>{post.excerpt}</div>
<Link href={`/posts/${encodeURIComponent(post.slug)}`}>
<a className={styles.postReadMoreButtonLink}>
<span role="button" className={styles.postReadMoreButton}>Read More </span>
</a>
</Link>
</div>
</li>
))}
</ul>
</div>
)
}

View file

@ -0,0 +1,54 @@
/* Centre the post list horizontally. */
.container {
  display: flex;
  flex-direction: column;
  align-items: center;
}

/* The list itself: capped width, no bullets. */
.postsList {
  max-width: 40rem;
  display: flex;
  flex-direction: column;
  align-items: flex-start;
  list-style: none;
  padding: 0;
  margin: 0 2rem;
}

.postHeader {
  font-size: 1.2rem;
  font-weight: 900;
  margin-bottom: 0.1rem;
}

.postTimestamp {
  font-size: 0.75rem;
  opacity: 0.6;
}

.postBlurb {
  font-size: 0.85rem;
  margin: 0.4rem 0;
}

/* "Read More" link colour, darkened on hover. */
.postReadMoreButtonLink {
  color: #46be77;
  &:hover {
    color: #22846c;
    text-decoration: none;
  }
}

/* Button-styled span inside the read-more link. */
.postReadMoreButton {
  display: inline-flex;
  padding: 0 1rem;
  line-height: 2rem;
  font-weight: 100;
  text-align: center;
  border-radius: 3px;
  transition: all 0.2s;
  background: rgba(251, 252, 252, 0.2);
  border: 1px solid currentColor;
  font-size: 0.8rem;
}

View file

@ -0,0 +1,74 @@
import fs from 'fs'
import path from 'path'
import matter from 'gray-matter'
import { unified } from 'unified'
import remarkParse from 'remark-parse'
import remarkGfm from 'remark-gfm'
import remarkRehype from 'remark-rehype'
import rehypeHighlight from 'rehype-highlight'
import rehypeStringify from 'rehype-stringify'
const fsPromises = fs.promises
const postsDirectory = path.join(process.cwd(), 'posts')
// Renders Markdown (with GitHub-flavored extensions and code-block syntax
// highlighting) to an HTML string via the unified pipeline.
async function markdownToHtml(content) {
  const pipeline = unified()
    .use(remarkParse)
    .use(remarkGfm)
    .use(remarkRehype)
    .use(rehypeHighlight)
    .use(rehypeStringify)
  return pipeline.process(content)
}
// Lists post slugs: every *.md file in the posts directory, extension
// stripped.
export async function getPostSlugs() {
  const entries = await fsPromises.readdir(postsDirectory)
  return entries
    .filter((fileName) => fileName.endsWith('.md'))
    .map((fileName) => fileName.slice(0, -'.md'.length))
}
// Loads and fully renders a single post from posts/<slug>.md. Internal: the
// returned object carries a `sortKey` used by getSortedPostsData; external
// callers should use getPostBySlug, which strips it.
export async function _getPostBySlug(slug) {
  const fileName = `${slug}.md`
  const fullPath = path.join(postsDirectory, fileName)
  const fileContents = await fsPromises.readFile(fullPath, 'utf8')
  // Split front matter from the body; `excerpt: true` extracts the text
  // above the first "---" separator as the excerpt.
  const matterResult = matter(fileContents, { excerpt: true })
  const processedContent = await markdownToHtml(matterResult.content)
  const processedExcerpt = await markdownToHtml(matterResult.excerpt)
  return {
    ...matterResult.data,
    slug,
    // Assumes front-matter `date` is parsed as a Date (toISOString is
    // called on it) — TODO(review): confirm every post has a YAML date.
    date: matterResult.data.date.toISOString().substring(0, 10),
    excerptHtml: processedExcerpt.toString(),
    contentHtml: processedContent.toString(),
    sortKey: matterResult.data.date,
  }
}
// Public wrapper around _getPostBySlug that strips the internal sortKey.
export async function getPostBySlug(slug) {
  const { sortKey, ...post } = await _getPostBySlug(slug)
  return post
}
// Returns every post's metadata (without contentHtml), sorted newest-first
// by the front-matter date.
export async function getSortedPostsData() {
  const slugs = await getPostSlugs()
  const allPostsData = await Promise.all(slugs.map(_getPostBySlug))
  return allPostsData
    // sortKey is the raw front-matter Date; compare it numerically. The
    // previous Date.parse(String(date)) round-trip dropped sub-second
    // precision and depended on Date#toString formatting.
    .sort((a, b) => b.sortKey - a.sortKey)
    .map((post) => {
      // Listings only need the excerpt; drop the heavy body and the
      // internal sort key.
      delete post.contentHtml
      delete post.sortKey
      return post
    })
}

View file

@ -0,0 +1 @@
// Sass variable for the site's maximum content width.
$maxWidth: 61rem;

View file

@ -0,0 +1,18 @@
const withPlugins = require('next-compose-plugins');
const optimizedImages = require('next-optimized-images');

/** @type {import('next').NextConfig} */
const nextConfig = {
  reactStrictMode: true,
  images: {
    // Hand image handling to next-optimized-images rather than the built-in
    // loader — presumably because the site is statically exported (see
    // default.nix's `next export`); confirm before changing.
    loader: 'custom',
    disableStaticImages: true,
  },
}

// Compose the base config with the next-optimized-images plugin.
module.exports = withPlugins([
  [optimizedImages, {
    // config
  }],
], nextConfig);

View file

@ -0,0 +1,17 @@
# This file has been generated by node2nix 1.9.0. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-12_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};
in
import ./node-packages.nix {
inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit;
inherit nodeEnv;
}

588
web/lukegbcom/node-env.nix Normal file
View file

@ -0,0 +1,588 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
python = if nodejs ? python then nodejs.python else python2;
# Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
tarWrapper = runCommand "tarWrapper" {} ''
mkdir -p $out/bin
cat > $out/bin/tar <<EOF
#! ${stdenv.shell} -e
$(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
EOF
chmod +x $out/bin/tar
'';
# Function that generates a TGZ file from a NPM project
buildNodeSourceDist =
{ name, version, src, ... }:
stdenv.mkDerivation {
name = "node-tarball-${name}-${version}";
inherit src;
buildInputs = [ nodejs ];
buildPhase = ''
export HOME=$TMPDIR
tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
'';
installPhase = ''
mkdir -p $out/tarballs
mv $tgzFile $out/tarballs
mkdir -p $out/nix-support
echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
'';
};
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
local strippedName
local DIR=$PWD
cd $TMPDIR
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
# Restore write permissions to make building work
find "$packageDir" -type d -exec chmod u+x {} \;
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/$packageName"
fi
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.name}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
'';
pinpointDependencies = {dependencies, production}:
let
pinpointDependenciesFromPackageJSON = writeTextFile {
name = "pinpointDependencies.js";
text = ''
var fs = require('fs');
var path = require('path');
function resolveDependencyVersion(location, name) {
if(location == process.env['NIX_STORE']) {
return null;
} else {
var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
if(fs.existsSync(dependencyPackageJSON)) {
var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
if(dependencyPackageObj.name == name) {
return dependencyPackageObj.version;
}
} else {
return resolveDependencyVersion(path.resolve(location, ".."), name);
}
}
}
function replaceDependencies(dependencies) {
if(typeof dependencies == "object" && dependencies !== null) {
for(var dependency in dependencies) {
var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
if(resolvedVersion === null) {
process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
} else {
dependencies[dependency] = resolvedVersion;
}
}
}
}
/* Read the package.json configuration */
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
/* Pinpoint all dependencies */
replaceDependencies(packageObj.dependencies);
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
replaceDependencies(packageObj.optionalDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
'';
};
in
''
node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
${lib.optionalString (dependencies != [])
''
if [ -d node_modules ]
then
cd node_modules
${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
cd ..
fi
''}
'';
# Recursively traverses all dependencies of a package and pinpoints all
# dependencies in the package.json file to the versions that are actually
# being used.
pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
''
if [ -d "${packageName}" ]
then
cd "${packageName}"
${pinpointDependencies { inherit dependencies production; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
fi
'';
# Extract the Node.js source code which is used to compile packages with
# native bindings
nodeSources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv node-* $out
'';
# Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty)
addIntegrityFieldsScript = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
function augmentDependencies(baseDir, dependencies) {
for(var dependencyName in dependencies) {
var dependency = dependencies[dependencyName];
// Open package.json and augment metadata fields
var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
var packageJSONPath = path.join(packageJSONDir, "package.json");
if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
console.log("Adding metadata fields to: "+packageJSONPath);
var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
if(dependency.integrity) {
packageObj["_integrity"] = dependency.integrity;
} else {
packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads.
}
if(dependency.resolved) {
packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
} else {
packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
}
if(dependency.from !== undefined) { // Adopt from property if one has been provided
packageObj["_from"] = dependency.from;
}
fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
}
// Augment transitive dependencies
if(dependency.dependencies !== undefined) {
augmentDependencies(packageJSONDir, dependency.dependencies);
}
}
}
if(fs.existsSync("./package-lock.json")) {
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
augmentDependencies(".", packageLock.dependencies);
}
}
'';
};
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
requires: true,
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
files.forEach(function(entry) {
var filePath = path.join(dir, entry);
var stats = fs.statSync(filePath);
if(stats.isDirectory()) {
if(entry.substr(0, 1) == "@") {
// When we encounter a namespace folder, augment all packages belonging to the scope
var pkgFiles = fs.readdirSync(filePath);
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Produces the shell fragment that deploys an unpacked NPM package:
# pinpoints dependency versions, patches shebangs, then runs `npm rebuild`
# and `npm install` with the network effectively disabled so that Nix stays
# the sole provider of dependencies.
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
  let
    # With bypassCache we can use npm's own --offline mode; otherwise point
    # the registry at an unusable host so any accidental fetch fails loudly.
    forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
  in
  ''
    # Pinpoint the versions of all dependencies to the ones that are actually being used
    echo "pinpointing versions of dependencies..."
    source $pinpointDependenciesScriptPath
    # Patch the shebangs of the bundled modules to prevent them from
    # calling executables outside the Nix store as much as possible
    patchShebangs .
    # Deploy the Node.js package by running npm install. Since the
    # dependencies have been provided already by ourselves, it should not
    # attempt to install them again, which is good, because we want to make
    # it Nix's responsibility. If it needs to install any dependencies
    # anyway (e.g. because the dependency parameters are
    # incomplete/incorrect), it fails.
    #
    # The other responsibilities of NPM are kept -- version checks, build
    # steps, postprocessing etc.
    export HOME=$TMPDIR
    cd "${packageName}"
    runHook preRebuild
    ${lib.optionalString bypassCache ''
      ${lib.optionalString reconstructLock ''
        if [ -f package-lock.json ]
        then
          echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
          echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
          rm package-lock.json
        else
          echo "No package-lock.json file found, reconstructing..."
        fi
        node ${reconstructPackageLock}
      ''}
      node ${addIntegrityFieldsScript}
    ''}
    npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
    if [ "''${dontNpmInstall-}" != "1" ]
    then
      # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
      rm -f npm-shrinkwrap.json
      npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
    fi
  '';
# Builds and composes an NPM package including all its dependencies
# into $out/lib/node_modules, symlinking executables and man pages.
buildNodePackage =
  { name
  , packageName
  , version
  , dependencies ? []
  , buildInputs ? []
  , production ? true
  , npmFlags ? ""
  , dontNpmInstall ? false
  , bypassCache ? false
  , reconstructLock ? false
  , preRebuild ? ""
  , dontStrip ? true
  , unpackPhase ? "true"
  , buildPhase ? "true"
  , meta ? {}
  , ... }@args:
  let
    # Anything not consumed above is forwarded verbatim to mkDerivation.
    extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
  in
  stdenv.mkDerivation ({
    name = "${name}-${version}";
    buildInputs = [ tarWrapper python nodejs ]
      ++ lib.optional (stdenv.isLinux) utillinux
      ++ lib.optional (stdenv.isDarwin) libtool
      ++ buildInputs;
    inherit nodejs;
    inherit dontStrip; # Stripping may fail a build for some package deployments
    inherit dontNpmInstall preRebuild unpackPhase buildPhase;
    # Scripts are passed as files (via passAsFile) so large dependency trees
    # do not blow past environment-size limits.
    compositionScript = composePackage args;
    pinpointDependenciesScript = pinpointDependenciesOfPackage args;
    passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
    installPhase = ''
      source ${installPackage}
      # Create and enter a root node_modules/ folder
      mkdir -p $out/lib/node_modules
      cd $out/lib/node_modules
      # Compose the package and all its dependencies
      source $compositionScriptPath
      ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
      # Create symlink to the deployed executable folder, if applicable
      if [ -d "$out/lib/node_modules/.bin" ]
      then
        ln -s $out/lib/node_modules/.bin $out/bin
      fi
      # Create symlinks to the deployed manual page folders, if applicable
      if [ -d "$out/lib/node_modules/${packageName}/man" ]
      then
        mkdir -p $out/share
        for dir in "$out/lib/node_modules/${packageName}/man/"*
        do
          mkdir -p $out/share/man/$(basename "$dir")
          for page in "$dir"/*
          do
            ln -s $page $out/share/man/$(basename "$dir")
          done
        done
      fi
      # Run post install hook, if provided
      runHook postInstall
    '';
    meta = {
      # default to Node.js' platforms
      platforms = nodejs.meta.platforms;
    } // meta;
  } // extraArgs);
# Builds a node environment (a node_modules folder and a set of binaries)
# without the project's own sources; only package.json/package-lock.json are
# copied in so npm's metadata checks succeed.
buildNodeDependencies =
  { name
  , packageName
  , version
  , src
  , dependencies ? []
  , buildInputs ? []
  , production ? true
  , npmFlags ? ""
  , dontNpmInstall ? false
  , bypassCache ? false
  , reconstructLock ? false
  , dontStrip ? true
  , unpackPhase ? "true"
  , buildPhase ? "true"
  , ... }@args:
  let
    # Remaining attributes are passed straight through to mkDerivation.
    extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
  in
  stdenv.mkDerivation ({
    name = "node-dependencies-${name}-${version}";
    buildInputs = [ tarWrapper python nodejs ]
      ++ lib.optional (stdenv.isLinux) utillinux
      ++ lib.optional (stdenv.isDarwin) libtool
      ++ buildInputs;
    inherit dontStrip; # Stripping may fail a build for some package deployments
    inherit dontNpmInstall unpackPhase buildPhase;
    # Scripts are passed as files (via passAsFile) to avoid env-size limits.
    includeScript = includeDependencies { inherit dependencies; };
    pinpointDependenciesScript = pinpointDependenciesOfPackage args;
    passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
    installPhase = ''
      source ${installPackage}
      mkdir -p $out/${packageName}
      cd $out/${packageName}
      source $includeScriptPath
      # Create fake package.json to make the npm commands work properly
      cp ${src}/package.json .
      chmod 644 package.json
      ${lib.optionalString bypassCache ''
        if [ -f ${src}/package-lock.json ]
        then
          cp ${src}/package-lock.json .
        fi
      ''}
      # Go to the parent folder to make sure that all packages are pinpointed
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
      # Expose the executables that were installed
      cd ..
      ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
      mv ${packageName} lib
      ln -s $out/lib/node_modules/.bin $out/bin
    '';
  } // extraArgs);
# Builds a development shell
# (a wrapper script plus the dependency closure, exposed via NODE_PATH).
buildNodeShell =
  { name
  , packageName
  , version
  , src
  , dependencies ? []
  , buildInputs ? []
  , production ? true
  , npmFlags ? ""
  , dontNpmInstall ? false
  , bypassCache ? false
  , reconstructLock ? false
  , dontStrip ? true
  , unpackPhase ? "true"
  , buildPhase ? "true"
  , ... }@args:
  let
    # The shell reuses the full dependency build; only the NODE_PATH/PATH
    # wiring below differs from buildNodeDependencies.
    nodeDependencies = buildNodeDependencies args;
  in
  stdenv.mkDerivation {
    name = "node-shell-${name}-${version}";
    buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
    buildCommand = ''
      mkdir -p $out/bin
      cat > $out/bin/shell <<EOF
      #! ${stdenv.shell} -e
      $shellHook
      exec ${stdenv.shell}
      EOF
      chmod +x $out/bin/shell
    '';
    # Provide the dependencies in a development shell through the NODE_PATH environment variable
    inherit nodeDependencies;
    shellHook = lib.optionalString (dependencies != []) ''
      export NODE_PATH=${nodeDependencies}/lib/node_modules
      export PATH="${nodeDependencies}/bin:$PATH"
    '';
  };
in
{
  # Public API of this node environment; every builder is overridable
  # via `.override` (used by node-overrides.nix).
  buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
  buildNodePackage = lib.makeOverridable buildNodePackage;
  buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
  buildNodeShell = lib.makeOverridable buildNodeShell;
}

View file

@ -0,0 +1,29 @@
{pkgs ? import <nixpkgs> {
  inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-12_x"}:
let
  # node2nix-generated composition (package/tarball/shell/nodeDependencies).
  nodePackages = import ./node-composition.nix {
    inherit pkgs system nodejs;
  };
  # Adds the native build dependencies needed by the image-processing
  # packages and swaps the source for a manifest-only copy so that
  # dependency derivations don't rebuild when site content changes.
  override = orig: {
    buildInputs = (orig.buildInputs or []) ++ (with pkgs; [ pkg-config vips glib ]);
    src = contentFreeSrc;
  };
  # A copy of the project source containing only package.json and
  # package-lock.json (everything else is gitignore-filtered out).
  contentFreeSrc = pkgs.stdenv.mkDerivation {
    name = nodePackages.args.name + "-package-json";
    src = pkgs.nix-gitignore.gitignoreSourcePure [
      "*"
      "!package.json"
      "!package-lock.json"
    ] nodePackages.args.src;
    dontBuild = true;
    installPhase = "mkdir -p $out; cp -r ./* $out;";
  };
in
nodePackages // (with nodePackages; {
  tarball = tarball.override override;
  package = package.override override;
  shell = shell.override override;
  nodeDependencies = nodeDependencies.override override;
})

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,8 @@
#!/usr/bin/env nix-shell
#!nix-shell -p nodePackages.node2nix -i bash
# Regenerates the node2nix expressions (node-packages.nix and
# node-composition.nix) from package.json / package-lock.json.
# Run after changing dependencies.
exec node2nix \
  -i package.json \
  -l package-lock.json \
  -o node-packages.nix \
  -c node-composition.nix

5991
web/lukegbcom/package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,34 @@
{
"name": "lukegbcom",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
},
"dependencies": {
"@babel/preset-env": "^7.16.11",
"@babel/preset-react": "^7.16.7",
"gray-matter": "^4.0.3",
"ip-address": "^8.1.0",
"jsbn": "^1.1.0",
"next": "12.1.4",
"next-compose-plugins": "^2.2.1",
"next-optimized-images": "^3.0.0-canary.10",
"react": "18.0.0",
"react-dom": "18.0.0",
"rehype-highlight": "^5.0.2",
"rehype-stringify": "^9.0.3",
"remark-gfm": "^3.0.1",
"remark-parse": "^10.0.1",
"remark-rehype": "^10.1.0",
"sass": "^1.49.11",
"unified": "^10.1.2"
},
"devDependencies": {
"eslint": "8.12.0",
"eslint-config-next": "12.1.4"
}
}

View file

@ -0,0 +1,7 @@
import '../styles/globals.scss'
function MyApp({ Component, pageProps }) {
return <Component {...pageProps} />
}
export default MyApp

View file

@ -0,0 +1,38 @@
import Head from 'next/head'
import styles from '../styles/Home.module.css'
import HeaderNav from '../lib/HeaderNav'
import HeroImage from '../lib/HeroImage'
import PostsList from '../lib/PostsList'
import { getSortedPostsData } from '../lib/posts'
// Build-time data fetch: supplies the full, sorted post index to the page.
export async function getStaticProps() {
  return {
    props: {
      allPostsData: await getSortedPostsData(),
    },
  }
}
export default function Home({ allPostsData }) {
return (
<div className={styles.container}>
<HeaderNav />
<HeroImage image="https://source.unsplash.com/Q1p7bh3SHj8/3841x2160" credit={{ url: "https://unsplash.com/photos/Q1p7bh3SHj8", text: "NASA" }} withGradient="bottom-black" fullHeight={true}>
Luke Granger-Brown
</HeroImage>
<Head>
<title>Luke Granger-Brown</title>
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<h2 className={styles.latestPostsHeader}>
Latest Posts
</h2>
<PostsList posts={allPostsData} />
</main>
</div>
)
}

View file

@ -0,0 +1,34 @@
import Head from 'next/head'
import styles from '../styles/Home.module.css'
import HeaderNav from '../lib/HeaderNav'
import HeroImage from '../lib/HeroImage'
import PostsList from '../lib/PostsList'
import { getSortedPostsData } from '../lib/posts'
// Build-time data fetch: supplies the full, sorted post index to the page.
export async function getStaticProps() {
  return {
    props: {
      allPostsData: await getSortedPostsData(),
    },
  }
}
export default function Posts({ allPostsData }) {
return (
<div className={styles.container}>
<HeaderNav />
<HeroImage image="https://source.unsplash.com/IIDxzNru2GY/3841x2160" credit={{ url: "https://unsplash.com/photos/IIDxzNru2GY", text: "Csabi Elter" }} withGradient="top-black">
Posts
</HeroImage>
<Head>
<title>Posts | Luke Granger-Brown</title>
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<PostsList posts={allPostsData} />
</main>
</div>
)
}

View file

@ -0,0 +1,51 @@
import Head from 'next/head'
import styles from '../../styles/Post.module.scss'
import HeaderNav from '../../lib/HeaderNav'
import HeroImage from '../../lib/HeroImage'
import { getPostSlugs, getPostBySlug } from '../../lib/posts'
// Enumerates every post slug at build time; unknown slugs 404 (fallback: false).
export async function getStaticPaths() {
  const slugs = await getPostSlugs()
  return {
    paths: slugs.map((slug) => ({ params: { slug } })),
    fallback: false,
  }
}
// Build-time data fetch: loads the single post identified by the route slug.
export async function getStaticProps({ params }) {
  const postData = await getPostBySlug(params.slug)
  return {
    props: { postData },
  }
}
// Builds the hero-image credit for a post from its front matter, or null
// when the post has no 'hero credit' entry.
function generateCredit(postData) {
  const creditUrl = postData['hero credit']
  return creditUrl
    ? { url: creditUrl, text: postData['hero credit text'] }
    : null
}
export default function Post({ postData }) {
return (
<div className={styles.container}>
<HeaderNav />
<HeroImage image={postData.hero} credit={generateCredit(postData)} withGradient={true}>
{postData.title}
</HeroImage>
<Head>
<title>{postData.title} | Luke Granger-Brown</title>
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<div className={styles.post} dangerouslySetInnerHTML={{ __html: postData.contentHtml }} />
</main>
</div>
)
}

View file

@ -0,0 +1,32 @@
import Head from 'next/head'
import Link from 'next/link'
import styles from '../styles/Post.module.scss'
import HeaderNav from '../lib/HeaderNav'
import HeroImage from '../lib/HeroImage'
export default function Toolbox() {
return (
<div className={styles.container}>
<HeaderNav />
<HeroImage image="https://images.unsplash.com/photo-1465659504169-09fe8930ccae" credit={{ url: "https://images.unsplash.com/photo-1465659504169-09fe8930ccae", text: "kychan" }} withGradient="top-blue">
Toolbox
</HeroImage>
<Head>
<title>Toolbox | Luke Granger-Brown</title>
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<div className={styles.post}>
A collection of tools. Maybe some will be useful.
<ul>
<li>
<Link href="/tools/net"><a>Networking Tools</a></Link>
</li>
</ul>
</div>
</main>
</div>
)
}

View file

@ -0,0 +1,27 @@
import Head from 'next/head'
import Link from 'next/link'
import styles from '../../styles/Post.module.scss'
import HeaderNav from '../../lib/HeaderNav'
import HeroImage from '../../lib/HeroImage'
import NetworkingTools from '../../lib/NetworkingTools'
export default function NetTools() {
return (
<div className={styles.container}>
<HeaderNav />
<HeroImage image="https://farm9.staticflickr.com/8218/8437956869_05a4e887b0_k_d.jpg" credit={{ url: "https://www.flickr.com/photos/npobre/8437956869/", text: "Norlando Pobre, Flickr" }} withGradient="top-blue">
Networking Tools
</HeroImage>
<Head>
<title>Networking Tools | Luke Granger-Brown</title>
<link rel="icon" href="/favicon.ico" />
</Head>
<main className={styles.main}>
<div className={styles.post}>
<NetworkingTools />
</div>
</main>
</div >
)
}

View file

@ -0,0 +1,91 @@
---
title: Setting the Scene
date: 2016-01-25T16:44:00
layout: Post
hero: https://source.unsplash.com/7P_2hzKryQE/1270x952
hero credit: https://unsplash.com/photos/7P_2hzKryQE
hero credit text: Luis Llerena
classes:
header: header-black-gradient
tags:
- university tech
---
This must, by now, be my fifth or sixth attempt at a blog. As per usual, I'll tell myself that I'll do better this time, and I'll write regularly, and post when I find solutions to interesting problems.
---
Whatever, hopefully I'll at least manage to keep some useful content here.
To set the scene for some of the material I'll be blogging about anyway:
I'm a university student at [Imperial College London](https://www.imperial.ac.uk) in the [Department of Computing](https://www.imperial.ac.uk/computing). As of this writing, I'm a third year, on the four-year MEng programme.
In my spare time, I indulge in a number of hobbies:
* Playing [Heroes of the Storm](https://eu.battle.net/heroes/en/), a "Hero Brawler" (or a MOBA...) by Blizzard Entertainment
* Running [Imperial Cinema](http://www.imperialcinema.co.uk), the student-run cinema of Imperial. This year, I'm the chair. Last year, I was treasurer. Ho hum.
* Acting as Media Technical Manager for Imperial College Union's Media Group.
## Heroes of the Storm
Not much to say here. If you like playing (EU West!), then poke me. Maybe I'll play a match or two with you - I'm not very good...
## Imperial Cinema
There's a lot of fun technical stuff that goes on here. We've got:
* _Digital Projector_: Barco DP2K-10Sx (without the Barco Alchemy board)
* _Media Server_: Doremi ShowVault (with the web GUI hacks)
* _Audio Kit_: Some Flare surrounds, primarily because they're small
* _Servers_:
* `ernie`: webserver and database server. Stores ticketing info.
* `avantgarde`: acts as a bridge between our automation and the outside world
## Media Technical Manager
This is fun. I manage:
* `onyx`: an ESXi install, with a bunch of different OSs on it.
* `cog`: CentOS. On `onyx`. Runs our directory server - we use [FreeIPA](https://www.freeipa.org).
* `gears`: Debian. On `onyx`, bound to `cog`. This is the storage VM, running home directories, media storage for Rivendell. Supports NFS, AFP (netatalk) and SMB (Samba), as well as SFTP, if you have shell access rights.
* `galen`: Debian. On `onyx`. Media group webserver. Runs some websites.
* `hubs`: OS X Server. Mostly around for Profile Manager and Apple Remote Desktop.
* `icr-firestar`: Debian. Runs the [ICRadio](http://www.icradio.com) website.
* `icr-optimus`: CentOS. Central server for the [Rivendell](http://www.rivendellaudio.org) radio broadcast system we use.
* `stoic-blackadder`: Debian. Used for [ICTV](http://www.imperialcollege.tv)'s playout system.
* `stoic-dexter`: Debian. Also used for various internal bits of ICTV.
* `stoic-hannibal`: Windows, used for live video production, running [CasparCG](http://www.casparcg.com)
* `nucleus`: OS X Server (again). This is a Mac Mini.
as well as a bunch of Mac clients of varying vintages:
* `silverbolt`
* `airraid`
* `slingshot`
* `fireflight`
* `skydive`
* `groove`
* `streetwise`
* `superion`
* `omegasupreme`
* `tachyon`
* `lightspeed`
* `cliffjumper`
* `stoic-macedit1`
* `hyperon`
* `boron`
...some Mac Minis used for live production purposes:
* `hardhead`
* `chromedome`
* `brainstorm`
...some Apple TVs:
* `felix-atv`
* `media-atv`
There's quite a bit here, and all the Mac clients are bound to `cog` for directory purposes, and to `hubs` for Profile Management (and Munki).

View file

@ -0,0 +1,159 @@
---
title: Secure Boot Shenanigans
date: 2016-11-11
layout: Post
tags:
- linux
hero: /assets/2016-11-11/banner.a6acff235ea8.jpg
classes:
header: header-black-gradient
---
I bought a [Razer Blade Stealth](http://www.razerzone.com/gaming-systems/razer-blade-stealth)
recently as an ultrabook I can take to lectures and just generally use when I'm out and about
as a companion to the Pixel C I'm already using. Generally just for those annoying edgecases
where it's not quite enough to just have Android ;)
---
Anyway, it's dual-booting Arch and Windows 10. I wanted to see how much I could lock the platform
down in terms of security, so, first steps...
## Secure Boot
My end goal here is to use my own PKI to sign binaries which I deem acceptable to run on my system.
I'm not going to go into the details of the PKI used for Secure Boot, but more... the specifics of
modifying my laptop's firmware to allow me to substitute my own PKI for the factory shipped one.
However: Secure Boot on the Razer Blade Stealth is... a tad lacking. Why?
Razer have hidden the option to get to the key management options on the Razer Blade Stealth,
for some reason. I'm too lazy to contact Razer support to get a modified firmware, and I don't know
if they would even oblige, or just tell me to disable Secure Boot.
![Where's my Key Management option?](/assets/2016-11-11/firmware-secure-boot-before.35da7f6fed59.jpg)
*Note that I went back and took this picture afterwards, which is why this is already in User mode and the Vendor Keys are not active.*
But that's OK. The Razer Blade Stealth uses [AptioV from AMI](https://ami.com/products/bios-uefi-firmware/aptio-v/)
as its firmware, which is fairly well understood...
### Notes
* Using KeyTool.efi, I quickly discovered that the PK shipped with my system was the AMI
Test PK: `DO NOT TRUST - AMI Test PK`. *sigh*
### Modifying the firmware, or "this way leads to insanity and a voided warranty"
```txt
WARNING WARNING WARNING: There's a great potential to make your shiny new ultrabook into a £999+ brick.
I'm not responsible if you do this.
WARNING WARNING: This will almost certainly void your warranty. Maybe don't do this if you
like your warranty?
WARNING: if you do this, you probably want to not have Bitlocker enabled first, or at the very least
to have your Bitlocker recovery key first. Replacing your system's firmware *will* change the TPM
measurements and cause unsealing to fail (by design!).
```
#### Dumping the firmware
Using AMI's AFUWIN tool [(download)](https://www.wimsbios.com/amiflasher.jsp), it's possible to
dump (hint: the `Save` button is your friend!) the BIOS included with the system, since Razer have
not published any firmware updates for this system yet, so we can't just download it.
BACK THIS FILE UP. You never know when you might need it(!).
OK, great, you have a dump of your system's firmware (excluding the Management Engine's firmware).
What now?
#### Taking a peek under the hood
Well, using [UEFITool](https://github.com/LongSoft/UEFITool) it's possible to unpack the ROM image
that you've dumped and make modifications to it. If you're following along, then as of writing, you
should use the older [0.21.5](https://github.com/LongSoft/UEFITool/releases/tag/0.21.5) version
which supports repacking the ROM with changes.
Once you've done that, you need to locate the UEFI binary which is the configuration utility
(i.e. the thing with all the settings which most people think of when you say "BIOS"). The easiest
way to find this for me was to just search for anything containing the Unicode string "Key Management"(!)
Having found this binary, you can then dump it to a separate file (using **Extract body...** not
**Extract as is...**). As a brief overview, this binary contains the setup menus, and often contains
a bunch of functionality that's been masked out by the OEM, but which is still compiled in to the binary.
This includes, thankfully, the options for resetting the system back into Secure Boot Setup Mode.
A tool called [Universal IFR Extractor](https://github.com/donovan6000/Universal-IFR-Extractor) can help
shine some light on when this is the case, by showing you a textual representation of what the *I*nternal
*F*orms *R*epresentation tree looks like. It's super helpful, because it also displays the hex representation
alongside, as well as displaying the location in the binary where it was found.
In my case, I discovered that there's a check to see if the variable `0xB0E` is set to `0x0`, and if
so then the interesting menu items are hidden. Boo. We can fix that using a hex editor ;)
#### Hmm... I want... ALL OF THEM
By replacing the byte sequence `0A 82 12 06 0E 0B 00 00` (suppress if: variable 0xB0E has value 0x0)
with `0A 82 12 06 0E 0B 00 FF` (suppress if: variable 0xB0E has value 0xFF00), then we can force
all of these previously-hidden menu items to be shown. I'm being a bit rash here and just hoping that
this doesn't appear in the text section of the binary. Feel free to be a bit more surgical ;)
Having done that, you can now repack the ROM using UEFITool (using **Replace body...**), and then
save it (*not* over the top of your clean ROM!).
#### The Dangerous Bit
**You should reread the warnings I wrote above before doing this. It's not my fault if you brick your
system, even if you follow all of these instructions to the letter.**
Again using AFUWIN you can take your freshly unlocked ROM and flash it to your system.
Once that's done, cross your fingers and reboot! Hopefully you should now see the `Key Management`
options under `Secure Boot` in the options :)
![Secure Boot menu after doing some dangerous hacks](/assets/2016-11-11/firmware-secure-boot-after.c50a7559d0be.jpg)
![The newly-unhidden Secure Boot Key Management menu](/assets/2016-11-11/firmware-secure-boot-keys-after.bfbe6608b071.jpg)
## Drive Encryption
### Windows
Windows 10 Pro ships with Bitlocker, and the Razer Blade Stealth has a TPM, which means
that you can attest to the state of the system using the TPM to ensure that you're being booted
on the hardware (and with the software configuration) you think you're being booted on.
However, the RBS only ships with Windows 10 Home, so after entering my W10 Pro license key, and
waiting what felt like an age for it to apply the additional features, and reboot (twice!), I
managed to Bitlocker my drive.
For additional security, you can optionally [set it up in TPM+PIN mode](http://www.howtogeek.com/262720/how-to-enable-a-pre-boot-bitlocker-pin-on-windows/),
but I'm not entirely convinced this provides any serious additional benefit beyond a BIOS/disk
unlock password (with TPMed Bitlocker).
### Arch
Arch has dm-crypt support, which is both fairly standard and boring. I'm using LUKS+dm-crypt,
with a reasonably long passphrase. It looks like there's some interest in
[TrustedGrub](https://projects.sirrix.com/trac/trustedgrub/) to use the TPM measurements to
then unseal a key, similarly to Bitlocker, but I haven't done this.
### UEFI
I added a disk unlock password, as well as a UEFI administrator password to avoid unauthorised
changes to the boot order or to the UEFI configuration. Boot from USB is also disabled and removed
from the boot order.
## Bonus Pictures
Before:
![Advanced menu before](/assets/2016-11-11/firmware-advanced-before.e8758168cdad.jpg)
...and after:
![Advanced menu after](/assets/2016-11-11/firmware-advanced-after.a32bd6fe67aa.jpg)
As well as my new boot splash:
![Boot splash](/assets/2016-11-11/firmware-bootsplash-after.6bc3fe7087f9.jpg)

View file

@ -0,0 +1,164 @@
---
title: FreeNAS, FreeIPA, Samba and Kerberos
date: 2017-02-19
layout: Post
tags:
- university tech
---
As a foreword: the below solution is *not* recommended - it relies on a prerelease version of FreeNAS for some of its functionality, which isn't supported.
FreeNAS 10 comes with the ability to bind to a FreeIPA directory. Hooray! Let's try it out.
---
## Struggling with binding to the directory
Hmm, using the GUI to bind to the directory doesn't seem to work at all, or even create the entry. Let's try the CLI instead.
After spending a few minutes learning how the CLI works, I got it down to the following commands:
```txt
freenas# cli
Welcome to the FreeNAS CLI! Type 'help' to get started.
[...snip...]
unix::>directoryservice directories
unix::/directoryservice/directories>create media type=freeipa enumerate=yes enabled=no
unix::/directoryservice/directories>media properties
unix::/directoryservice/directories/media/properties>set realm=media.su.ic.ac.uk username=<privileged username> password=<privileged password> server=cog.media.su.ic.ac.uk
unix::/directoryservice/directories/media/properties>..
unix::/directoryservice/directories/media>set enabled=yes
```
Turns out that because of the setup (the SRV records are misconfigured if you're doing a Kerberos bind to the LDAP server - the LDAP server doesn't have a keytab for ldap.media.su.ic.ac.uk), you need to set the `server` property or FreeNAS gives you some random Python exception. Ho hum.
Having bound to the directory you can set things up as usual on a FreeNAS system, creating shares, but wait...
## FreeNAS doesn't seem to respect groups
There appears to be a bug in FreeNAS' dscache plugin for FreeIPA -- it doesn't find any groups other than the main POSIX group.
At a first glance, this appears to be because it's searching for all groups by `dn`, which isn't a property you can filter on in an LDAP search. Bah.
I applied the following patch. I should probably contribute this back to https://github.com/freenas/middleware...
```diff
diff -u a/FreeIPAPlugin.py b/FreeIPAPlugin.py
--- a/FreeIPAPlugin.py 2017-02-19 18:46:41.508852583 +0000
+++ b/FreeIPAPlugin.py 2017-02-19 18:48:03.768854453 +0000
@@ -32,6 +32,7 @@
import logging
import errno
import krb5
+from collections import defaultdict
from threading import Thread, Condition
from datetime import datetime
from plugin import DirectoryServicePlugin, DirectoryState
@@ -55,6 +56,14 @@
logger = logging.getLogger(__name__)
+def _split_bases(dns):
+ out = defaultdict(list)
+ for dn in dns:
+ rdn, _, base_dn = dn.partition(',')
+ out[base_dn].append(rdn)
+ return out
+
+
class FreeIPAPlugin(DirectoryServicePlugin):
def __init__(self, context):
self.context = context
@@ -124,14 +133,13 @@
group = dict(ret['attributes'])
if get(entry, 'memberOf'):
- builder = LdapQueryBuilder()
- qstr = builder.build_query([
- ('dn', 'in', get(entry, 'memberOf'))
- ])
-
- for r in self.search(self.base_dn, qstr):
- r = dict(r['attributes'])
- groups.append(get(r, 'ipaUniqueID.0'))
+ for base_dn, rdns in _split_bases(get(entry, 'memberOf')).items():
+ qstr = '(|({0}))'.format(')('.join(rdns))
+
+ for r in self.search(base_dn, qstr):
+ r = dict(r['attributes'])
+ if get(r, 'ipaUniqueID.0'):
+ groups.append(get(r, 'ipaUniqueID.0'))
if contains(entry, 'ipaNTHash'):
nthash = binascii.hexlify(entry['ipaNTHash']).decode('ascii')
```
One `cli system reboot` later, `groups leg13` is now correctly showing all of my groups. Hurrah!
## Authenticating Samba against passwords
By default, however, this setup won't work, since FreeNAS won't have permission to read the `ipaNTHash` attribute on users. Per https://bugs.freenas.org/issues/19976#note-24, the following commands sort that out too:
```txt
freeipa$ ipa permission-add 'ipaNTHash service read' --attrs=ipaNTHash --type=user --right=read
freeipa$ ipa privilege-add 'SMB services'
freeipa$ ipa privilege-add-permission 'SMB services' --permissions='ipaNTHash service read'
freeipa$ ipa role-add trustagent --desc="Trust agent (e.g. Samba servers)"
freeipa$ ipa role-add-privilege trustagent --privileges='SMB services'
freeipa$ ipa role-add-member trustagent --users=<trusted binding user>
```
To check, use:
```txt
freenas# dispatcherctl call dscached.account.getpwnam '"admin"'
```
which should show non-`null` entries for `"nthash"` and `"sid"`.
and
```txt
freenas# pdbedit -Lw admin
```
which shouldn't show Xs in the fourth column.
## Authenticating Samba against FreeIPA Kerberos
This is all well and good, but it would be nice if clients with valid Kerberos tickets could also authenticate...
First, FreeIPA needs to know about the FreeNAS server, since FreeNAS doesn't do a "proper" directory bind:
```txt
freeipa$ ipa host-add sparkplug.media.su.ic.ac.uk
freeipa$ ipa service-add cifs/sparkplug.media.su.ic.ac.uk
freeipa$ ipa service-add-host cifs/sparkplug.media.su.ic.ac.uk --hosts=sparkplug.media.su.ic.ac.uk
```
Then, you can fetch a keytab for it:
```txt
freeipa$ ipa-getkeytab -p cifs/sparkplug.media.su.ic.ac.uk -k sparkplug.kt
```
Communicate this to some location (I used `/root/sparkplug.kt`) on the FreeNAS box, then:
```txt
freenas# cli
unix::>directoryservice kerberos keytab
unix::/directoryservice/kerberos/keytab>create cifs keytab=/root/sparkplug.kt
```
This will add the keys in the keytab to `/etc/krb5.keytab`. Excellent.
Now we need to configure Samba on FreeNAS to respect the Keytab:
```txt
freenas# net conf setparm global 'realm' 'MEDIA.SU.IC.AC.UK'
freenas# net conf setparm global 'kerberos method' 'system keytab'
freenas# net conf setparm global 'security' 'ads'
```
...and that should be that!

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 215 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 285 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

1
web/lukegbcom/shell.nix Normal file
View file

@ -0,0 +1 @@
# Development shell: reuses the node2nix-generated shell (with local
# overrides) from node-overrides.nix.
(import ./node-overrides.nix {}).shell

View file

@ -0,0 +1,8 @@
// Styles for the Home (and Posts) pages.

// Heading above the post list; bottom margin removed because the list
// supplies its own spacing.
.latestPostsHeader {
  text-align: center;
  margin-bottom: 0;
}

// Page wrapper: keep some breathing room before the viewport bottom.
.container {
  margin-bottom: 2rem;
}

View file

@ -0,0 +1,13 @@
// Styles for individual post / article pages.

// Page wrapper: keep some breathing room before the viewport bottom.
.container {
  margin-bottom: 2rem;
}

// Centers the post column horizontally.
.main {
  display: flex;
  flex-direction: column;
  align-items: center;
}

// Post body: fill the column up to the shared --maxWidth token.
.post {
  max-width: var(--maxWidth);
  width: 100%;
}

View file

@ -0,0 +1,30 @@
// Global stylesheet, loaded once in pages/_app.js.
@use "highlight.global.css";

html,
body {
  padding: 0;
  margin: 0;
  font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen, Ubuntu, Cantarell, Fira Sans, Droid Sans,
    Helvetica Neue, sans-serif;
}

// Links inherit surrounding color; underline only on hover.
a {
  color: inherit;
  text-decoration: none;

  &:hover {
    text-decoration: underline;
  }
}

* {
  box-sizing: border-box;
}

html {
  // Design tokens consumed via var() in the module stylesheets.
  --colorNeutralLight: #fbfcfc;
  --maxWidth: 61rem;
}

body {
  color: #555;
}

View file

@ -0,0 +1,106 @@
/**
* You can try all official Highlight.js theme here
*
* https://highlightjs.org/static/demo/
*
* More themes here
*
* https://github.com/isagalaev/highlight.js/tree/master/src/styles
*/
/*
Atom One Dark by Daniel Gamage
Original One Dark Syntax theme from https://github.com/atom/one-dark-syntax
base: #282c34
mono-1: #abb2bf
mono-2: #818896
mono-3: #5c6370
hue-1: #56b6c2
hue-2: #61aeee
hue-3: #c678dd
hue-4: #98c379
hue-5: #e06c75
hue-5-2: #be5046
hue-6: #d19a66
hue-6-2: #e6c07b
*/
.hljs {
display: block;
overflow-x: auto;
padding: 0.5em;
color: #abb2bf;
background: #282c34;
}
.hljs-comment,
.hljs-quote {
color: #5c6370;
font-style: italic;
}
.hljs-doctag,
.hljs-keyword,
.hljs-formula {
color: #c678dd;
}
.hljs-section,
.hljs-name,
.hljs-selector-tag,
.hljs-deletion,
.hljs-subst {
color: #e06c75;
}
.hljs-literal {
color: #56b6c2;
}
.hljs-string,
.hljs-regexp,
.hljs-addition,
.hljs-attribute,
.hljs-meta-string {
color: #98c379;
}
.hljs-built_in,
.hljs-class .hljs-title {
color: #e6c07b;
}
.hljs-attr,
.hljs-variable,
.hljs-template-variable,
.hljs-type,
.hljs-selector-class,
.hljs-selector-attr,
.hljs-selector-pseudo,
.hljs-number {
color: #d19a66;
}
.hljs-symbol,
.hljs-bullet,
.hljs-link,
.hljs-meta,
.hljs-selector-id,
.hljs-title {
color: #61aeee;
}
.hljs-emphasis {
font-style: italic;
}
.hljs-strong {
font-weight: bold;
}
.hljs-link {
text-decoration: underline;
}