Compare commits
10 commits
1d1c0d6139 ... 3ddd3dec8e

Author | SHA1 | Date
---|---|---
Skye | 3ddd3dec8e |
Artemis Tosini | 72fbcb42d6 |
Artemis Tosini | 358e68c8db |
Artemis Tosini | b8d6c6b8c4 |
Artemis Tosini | cef0b38f11 |
Artemis Tosini | 01fef3d182 |
Artemis Tosini | c64c23440e |
Artemis Tosini | 88f39c00c8 |
Artemis Tosini | e445bdc999 |
Artemis Tosini | 14a4a1825c |
.gitignore (vendored): 2 changes

@@ -1,5 +1,6 @@
 result
 .direnv
+.vscode/*
 *.pdf
 addresses.csv
@@ -11,5 +12,6 @@ cache/
 __pycache__/
 options.json
+secrets.json
 next_serial.txt
 mailer_id.txt
cards_4x6.typ (new file): 23 lines

@@ -0,0 +1,23 @@
+#{
+  set page(width: 6in, height: 4in, margin: 0em)
+
+  import "common.typ"
+
+  let options = json("options.json")
+  let cards = options.cards
+  let args = options.args
+
+  let content_fn = if args.no_content {
+    _ => []
+  } else {
+    import "content/content.typ"
+    content.content
+  }
+
+  for (idx, card) in cards.enumerate() {
+    if idx != 0 {
+      pagebreak()
+    }
+    common.postcard_content(100%, 100%, content_fn, card)
+  }
+}
cards_a6_lettersheet.typ (new file): 12 lines

@@ -0,0 +1,12 @@
+#{
+  set page("us-letter", margin: 0em)
+
+  import "common.typ"
+
+  let options = json("options.json")
+  let cards = options.cards
+  let args = options.args
+
+  common.card_sheets(148mm, 105mm, 1in/16, args, cards)
+
+}
@@ -38,17 +38,18 @@
     place(
       top + left,
       dy: height - 1in/8,
+      dx: 0.05in,
       block(
         width: 100%,
         height: 1in/8,
         align(
-          top + center,
+          top + left,
           text(font: "USPSIMBCompact", size: 12pt, card.imb)
         )
       )
     )
   }
-  if card.avatar != "" {
+  if card.avatar != none {
     place_avatar(text_height, card)
   }
 }
@@ -115,7 +116,7 @@
   let content_fn = if args.no_content {
     _ => []
   } else {
-    import "content/content.typ"
+    import "cache/content/content.typ"
     content.content
   }
fonts/Caveat.ttf (new file, BIN): Binary file not shown.
fonts/NotoEmoji.ttf (new file, BIN): Binary file not shown.
format.py: 193 changes

@@ -2,14 +2,29 @@
 import argparse
 import base64
 import csv
+import hashlib
 import json
 import os
+import pathlib
 import string
 import typing
+import urllib.parse
 import xml.etree.ElementTree as ET
 import requests
 import imb
+
+from pathlib import Path
+from typing import TypedDict
+
+# A lot of stuff needs to be in the same directory, just chdir
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+
+
+def cache_dir() -> Path:
+    cache = Path("cache")
+    cache.mkdir(exist_ok=True)
+    return cache


 def iso_code(s: str) -> str:
     if len(s) != 2:
@@ -20,43 +35,87 @@ def iso_code(s: str) -> str:
     return s


-def get_orig_avatar(url: str, name: str) -> typing.Optional[bytes]:
-    if not os.path.exists("cache"):
-        os.mkdir("cache")
-    if os.path.exists("cache/" + name):
-        with open("cache/" + name, "rb") as infile:
+def get_discord_avatar(
+    url: urllib.parse.ParseResult, secrets: dict[str, str]
+) -> typing.Optional[str]:
+    try:
+        uid = url.path
+        token = secrets["discord_token"]
+        user_info = requests.get(
+            f"https://discord.com/api/users/{uid}",
+            headers={"Authorization": f"Bot {token}"},
+        ).json()
+        avatar_hash = user_info["avatar"]
+        return f"https://cdn.discordapp.com/avatars/{uid}/{avatar_hash}.png?size=4096"
+    except KeyError:
+        return None
+
+
+def get_fedi_avatar(
+    url: urllib.parse.ParseResult, secrets: dict[str, str]
+) -> typing.Optional[str]:
+    try:
+        mastodon_api = secrets["mastodon_api"]
+        user_info = requests.get(
+            f"{mastodon_api}/api/v1/accounts/lookup", params={"acct": url.path}
+        ).json()
+        avatar_url = user_info["avatar_static"]
+        return avatar_url
+    except KeyError:
+        return None
+
+
+def get_orig_avatar(
+    url: str, basename: str, secrets: dict[str, str]
+) -> typing.Optional[bytes]:
+    url_parts = urllib.parse.urlparse(url)
+    if url_parts.scheme == "fedi":
+        real_url = get_fedi_avatar(url_parts, secrets)
+    elif url_parts.scheme == "discord":
+        real_url = get_discord_avatar(url_parts, secrets)
+    else:
+        real_url = url
+
+    if real_url is None:
+        return None
+
+    img_file = cache_dir() / basename
+
+    if img_file.exists():
+        with img_file.open("rb") as infile:
             return infile.read()
-    result = requests.get(url)
-    if result.ok:
-        with open("cache/" + name, "wb") as outfile:
-            outfile.write(result.content)
-        return result.content
-    return None
+    result = requests.get(real_url)
+    if not result.ok:
+        return None
+    with img_file.open("wb") as outfile:
+        outfile.write(result.content)
+    return result.content


-def get_avatar(url: str) -> str:
-    name = url.split("?")[0].split("/")[-1]
-    if os.path.exists(f"cache/{name}.svg"):
-        return f"cache/{name}.svg"
-    avatar_raster = get_orig_avatar(url, name)
+def get_avatar(url: str, secrets: dict[str, str]) -> str | None:
+    basename = hashlib.sha256(url.encode("utf-8")).hexdigest()
+    file_path = cache_dir() / f"{basename}.svg"
+    if file_path.exists():
+        return str(file_path)
+    avatar_raster = get_orig_avatar(url, basename, secrets)
     if avatar_raster is None:
-        return ""
+        return None

-    svg_text = f"""<svg viewBox="0 0 480 480" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+    svg_text = f"""<svg viewBox="0 0 512 512" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
     <clipPath id="circle">
-        <circle cx="240" cy="240" r="240" />
+        <circle cx="256" cy="256" r="256" />
     </clipPath>
-    <image width="480" height="480" clip-path="url(#circle)"
+    <image width="512" height="512" clip-path="url(#circle)"
         xlink:href="data:;base64,{base64.b64encode(avatar_raster).decode("utf-8")}" />
     </svg>"""

-    with open(f"cache/{name}.svg", "w") as svgfile:
+    with open(file_path, "w") as svgfile:
         svgfile.write(svg_text)
-    return f"cache/{name}.svg"
+    return str(file_path)


 def get_country_name(
-    root: ET.ElementTree, destination: str, alt=None
+    root: ET.ElementTree, destination: str, alt: str | None = None
 ) -> typing.Optional[str]:
     elements = root.findall(
         f"./localeDisplayNames/territories/territory[@type='{destination.upper()}']"
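The reworked get_orig_avatar dispatches on the scheme of the Avatar URL: fedi: goes through a Mastodon-compatible account lookup, discord: through the bot API, and anything else is fetched directly. A minimal sketch of how urllib.parse splits those custom schemes (the example values below are made up; the code only relies on .scheme and .path):

from urllib.parse import urlparse

# Hypothetical Avatar column values
for raw in (
    "discord:80351110224678912",    # Discord user id, resolved via the bot API
    "fedi:someone@example.social",  # acct handle, resolved via /api/v1/accounts/lookup
    "https://example.com/me.png",   # plain URL, fetched as-is
):
    parts = urlparse(raw)
    print(parts.scheme, parts.path)
# discord 80351110224678912
# fedi someone@example.social
# https /me.png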
@@ -107,52 +166,84 @@ parser.add_argument(
     help="CSV file containing addresses",
 )

+parser.add_argument(
+    "-i",
+    "--content-path",
+    default="content",
+    type=str,
+    help="Directory containing content files",
+)
+
 parser.add_argument(
     "-n",
     "--no-content",
     action="store_true",
-    help="Skip content, e.g. to make postcard back labels"
+    help="Skip content, e.g. to make postcard back labels",
 )

+parser.add_argument(
+    "-d", "--dont-compile", action="store_true", help="Don't compile to output.pdf"
+)
+
+parser.add_argument("-w", "--watch", action="store_true", help="Watch input files")
+
 args = parser.parse_args()

-root = ET.parse(
+cldr_root = ET.parse(
     f"{os.getenv('CLDR_ROOT')}/share/unicode/cldr/common/main/{args.language}.xml"
 )

 csvfile = open(args.address_file)
 rows = csv.DictReader(csvfile)

+with open("secrets.json") as secrets_file:
+    secrets = json.load(secrets_file)
+
 current_serial = imb.get_first_serial()
-mid = int(open("mailer_id.txt").read().strip())
+mid = secrets.get("mailer_id")


-cards = []
+class Card(TypedDict):
+    address: str
+    avatar: str | None
+    row: dict[str, str]
+    imb: str
+
+
+cards: list[Card] = []
 for row in rows:
     if row["Address"] == "":
         continue

-    country = (
-        []
-        if row["Country"].lower() == args.origin
-        else [get_country_name(root, row["Country"]).upper()]
-    )
+    if row["Country"].lower() == args.origin:
+        country = []
+    else:
+        name = get_country_name(cldr_root, row["Country"])
+        assert name is not None
+        country = [name.upper()]

     address = row["Address"].split("\n") + country

     if row.get("Avatar", "") != "":
-        avatar = get_avatar(row["Avatar"])
+        avatar = get_avatar(row["Avatar"], secrets)
     else:
         avatar = None

-    cards += [
-        {
-            "address": "\n".join(address),
-            "avatar": avatar,
-            "row": row,
-            "imb": "",
-        }
-    ]
+    card: Card = {
+        "address": "\n".join(address),
+        "avatar": avatar,
+        "row": row,
+        "imb": "",
+    }
+    cards.append(card)
+
+# Typst can't access files outside the project root, except through a symlink
+# Create one in cache to use here
+if not os.path.exists("cache"):
+    os.mkdir("cache")
+p = pathlib.Path("cache/content")
+p.unlink(missing_ok=True)
+p.symlink_to(args.content_path)

 cards = cards * args.count

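The script now reads its credentials from a secrets.json next to format.py (also added to .gitignore above) instead of a separate mailer_id.txt. The keys this diff uses are discord_token, mastodon_api, and mailer_id; a hypothetical file with placeholder values might look like:

{
  "discord_token": "your-bot-token",
  "mastodon_api": "https://mastodon.example",
  "mailer_id": 123456
}

Note that secrets.get("mailer_id") returns None when the key is absent, which is why the IMB hunk below now also checks mid is not None.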
@@ -160,7 +251,7 @@ serial = imb.get_first_serial()
 if args.origin == "us":
     for card in cards:
         dpc = card["row"].get("DPC", "")
-        if dpc != "":
+        if dpc != "" and mid is not None:
             card["imb"] = imb.generate(
                 0, 310, mid, serial, dpc.replace(" ", "").replace("-", "")
             )
|
||||||
"cards": cards,
|
"cards": cards,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if args.dont_compile:
|
||||||
|
exit()
|
||||||
|
|
||||||
|
font_paths = os.getenv("TYPST_FONT_PATHS")
|
||||||
|
assert font_paths is not None
|
||||||
|
|
||||||
|
os.execlp(
|
||||||
|
"typst",
|
||||||
|
"typst",
|
||||||
|
"watch" if args.watch else "compile",
|
||||||
|
"--font-path",
|
||||||
|
args.content_path,
|
||||||
|
"--font-path",
|
||||||
|
font_paths,
|
||||||
|
args.template,
|
||||||
|
"output.pdf",
|
||||||
|
)
|
||||||
|
|
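A note on the last format.py hunk: os.execlp replaces the Python process with typst, and the doubled "typst" is deliberate. The first argument is the executable looked up on PATH, while the remaining arguments become the new process's argv starting at argv[0]. A rough, illustrative near-equivalent using subprocess (paths here are placeholders; the real values come from argparse and TYPST_FONT_PATHS):

import subprocess
import sys

# Illustrative only: unlike execlp, this keeps the Python process alive until typst exits
cmd = [
    "typst",
    "compile",            # or "watch" when --watch is given
    "--font-path", "content",
    "--font-path", "/path/to/fonts",
    "cards_4x6.typ",      # stands in for args.template
    "output.pdf",
]
sys.exit(subprocess.run(cmd).returncode)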
@@ -14,7 +14,7 @@
       top + left,
       dx: 1in/16,
       dy: 1in/16,
-      common.address_block(2.5in, 1in, card)
+      common.address_block(3in, 1in, card)
     )
   }
 }