TIL April 27

This blog has now moved to AWS Amplify. It’s connected to a Bitbucket git repository, and AWS pulls it the moment a commit is pushed. Previously I was manually polling the repository from a VPS, but this is much quicker.


import sys
import re
from dateutil import parser
import datetime
import os

# Path of the post file to convert, given on the command line.
fn = sys.argv[1]

# Read the whole file once; the frontmatter is everything up to the
# first blank line, the post body is everything from that line on.
with open(fn, encoding="utf-8") as fread:
    lines = fread.readlines()

fmorig = []
# Default: no blank line found -> the whole file is frontmatter.
blank_line_i = len(lines)
for i, l in enumerate(lines):
    if len(l.strip()) == 0:
        # First blank line terminates the frontmatter block.
        blank_line_i = i
        break
    fmorig.append(l)


# Translate the original "Key: value" frontmatter lines into the fields
# the new frontmatter template expects.  Keys are matched by prefix and
# the slice after each prefix drops the "Key:" marker itself.
fmtarget = {}

for fm in fmorig:
    if fm.startswith('Date'):
        # Parse the date loosely (dateutil accepts most formats) and
        # expire the post one year after publication.
        dt = parser.parse(fm[5:].strip())
        fmtarget['date'] = dt
        fmtarget['expiryDate'] = dt + datetime.timedelta(days=365)
    elif fm.startswith('Title'):
        fmtarget['title'] = fm[6:].strip()
    elif fm.startswith('Author'):
        fmtarget['author'] = fm[7:].strip()
    elif fm.startswith('Image'):
        # Keep only the third '/'-separated path component and re-root
        # it under /images/.  NOTE(review): assumes the original image
        # path always has at least three components -- confirm.
        imageName = fm[6:].strip().split('/')[2]
        fmtarget['image'] = "/images/{}".format(imageName)
    elif fm.startswith('Status'):
        fmtarget['status'] = fm[7:].strip()
    elif fm.startswith('Dp'):
        fmtarget['dp'] = fm[3:].strip()
    elif fm.startswith('Tags'):
        # Comma-separated tag list; surrounding whitespace per tag is
        # preserved, matching the original round-trip behaviour.
        fmtarget['tags'] = fm[5:].strip().split(',')

dt = fmtarget['date']
dn, bn = os.path.split(fn)

# Build the new frontmatter from the parsed fields.
# NOTE(review): the original source was garbled here -- the .format()
# keyword arguments had been fused into the string literal and the
# title kwarg was missing; reconstructed from the template placeholders.
newfrontmatter = """---
title: "{title}"
date: {dt}
expiryDate: {expiryDate}
dp: {dp}
featured_image: "{image}"
images: ["{image}"]
published: {status}
tags: [{tags}]
---

""".format(title=fmtarget['title'],
           dt=fmtarget['date'].strftime("%F %H:%M:%S"),
           expiryDate=fmtarget['expiryDate'].strftime("%F %H:%M:%S"),
           dp=fmtarget['dp'],
           image=fmtarget['image'],
           status="true" if fmtarget['status'] == "published" else "false",
           tags=",".join(fmtarget['tags']) if 'tags' in fmtarget else '')

# Full converted document: new frontmatter followed by everything after
# the original frontmatter's terminating blank line.
newcontent = "{}{}".format(newfrontmatter, "".join(lines[blank_line_i:]))


# Keep the original file as a ".old" backup, then rewrite the file in
# place with the converted frontmatter and the untouched post body.
backup_filename = fn + ".old"
os.rename(fn, backup_filename)

with open(fn, mode="w", encoding="utf-8") as fwrite:
    # NOTE(review): the visible source opened the file but never wrote
    # anything before the chunk ended; this writes the converted
    # document, which is the evident intent.
    fwrite.write(newfrontmatter)
    fwrite.write("".join(lines[blank_line_i:]))

You need to add the following to config.toml

[outputs]
home = [ "RSS", "HTML"]

[outputFormats.RSS]
mediatype = "application/rss"
baseName = "rss"

To make an S3 bucket public, you need to add

{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "PublicReadGetObject",
            "Effect": "Allow",
            "Principal": "*",
            "Action": "s3:GetObject",
            "Resource": "arn:aws:s3:::subdomain.example.com/*"
        }
    ]
}

to BucketPolicy.

It’s possible to grant permissions on subfolders by right-clicking them, but to cover the entire bucket you need to write this policy.