mirror of
https://github.com/kmein/niveum
synced 2026-03-16 10:11:08 +01:00
package .bin/ scripts as proper nix packages, delete .bin/
Packaged 14 scripts from .bin/ into packages/ with proper dependency declarations (writers.writeDashBin/writeBashBin/writePython3Bin): - 256color → two56color (terminal color chart) - avesta.sed → avesta (Avestan transliteration) - bvg.sh → bvg (Berlin transit disruptions) - unicode → charinfo (Unicode character info) - chunk-pdf → chunk-pdf (split PDFs by page count) - csv2json → csv2json (CSV to JSON converter) - fix-sd.sh → fix-sd (exFAT SD card recovery, improved output handling) - json2csv → json2csv (JSON to CSV converter) - mp3player-write → mp3player-write (audio conversion for MP3 players) - mushakkil.sh → mushakkil (Arabic diacritization) - nix-haddock-index → nix-haddock-index (GHC Haddock index generator) - pdf-ocr.sh → pdf-ocr (OCR PDFs via tesseract) - prospekte.sh → prospekte (German supermarket flyer browser) - readme → readme (GitHub README as man page) All added to overlay and packages output. .bin/ directory removed.
This commit is contained in:
37
packages/256color.nix
Normal file
37
packages/256color.nix
Normal file
@@ -0,0 +1,37 @@
|
||||
{
  writers,
}:
# Print the 256-color terminal palette as a chart: the 16 standard/bright
# colours, the six 6x6 slices of the 216-colour cube, and the greyscale ramp.
writers.writeDashBin "256color" ''
  # pl FIRST LAST: print colours FIRST..LAST on one line.
  # Uses POSIX arithmetic expansion instead of forking `expr` per call.
  pl() {
    for i in $(seq "$1" $(($2 - 1))); do
      printf '\e[38;5;%sm%03i\e[m ' "$i" "$i"
    done
    printf '\e[38;5;%sm%03i\e[m\n' "$2" "$2"
  }

  # p N: print colour N, no trailing newline.
  p() {
    printf '\e[38;5;%sm%03i\e[m ' "$1" "$1"
  }

  # p6x6 BASE: print a 6x6 block of the colour cube starting at index BASE.
  p6x6() {
    for i in $(seq 0 5); do
      for j in $(seq 0 5); do
        p $(($1 + i + j * 6))
      done
      echo
    done
  }

  pl 0 7     # standard colours
  pl 8 15    # bright colours

  p6x6 16    # 6x6x6 colour cube, one 36-colour slice per call
  p6x6 52
  p6x6 88
  p6x6 124
  p6x6 160
  p6x6 196

  pl 232 243 # greyscale ramp
  pl 244 255
''
|
||||
65
packages/avesta.nix
Normal file
65
packages/avesta.nix
Normal file
@@ -0,0 +1,65 @@
|
||||
# Transliterate Latin-script Avestan to Avestan Unicode script
{
  writers,
  gnused,
}:
let
  # sed applies rules top to bottom, so every multi-codepoint sequence
  # (base letter + combining diacritics) must be rewritten BEFORE any rule
  # matching its base letter alone; otherwise the base-letter rule fires
  # first and the longer sequence can never match.
  #
  # Fix: "m̨" (m + combining ogonek; no precomposed codepoint exists) was
  # previously listed after the plain "m" rule and therefore unreachable.
  # It now lives with the other multi-codepoint rules.
  sedScript = builtins.toFile "avesta.sed" ''
    # multi-codepoint sequences first (order matters)
    s/ā̊/𐬃/g
    s/t̰/𐬝/g
    s/ṣ̌/𐬴/g
    s/š́/𐬳/g
    s/ą̄/𐬅/g
    s/ŋᵛ/𐬤/g
    s/ə̄/𐬇/g
    s/ŋ́/𐬣/g
    s/x́/𐬒/g
    s/xᵛ/𐬓/g
    s/m̨/𐬩/g
    # single-codepoint letters
    s/a/𐬀/g
    s/ā/𐬁/g
    s/å/𐬂/g
    s/ą/𐬄/g
    s/ə/𐬆/g
    s/e/𐬈/g
    s/ē/𐬉/g
    s/o/𐬊/g
    s/ō/𐬋/g
    s/i/𐬌/g
    s/ī/𐬍/g
    s/u/𐬎/g
    s/ū/𐬏/g
    s/k/𐬐/g
    s/x/𐬑/g
    s/g/𐬔/g
    s/ġ/𐬕/g
    s/γ/𐬖/g
    s/c/𐬗/g
    s/j/𐬘/g
    s/t/𐬙/g
    s/θ/𐬚/g
    s/d/𐬛/g
    s/δ/𐬜/g
    s/p/𐬞/g
    s/f/𐬟/g
    s/b/𐬠/g
    s/β/𐬡/g
    s/ŋ/𐬢/g
    s/n/𐬥/g
    s/ń/𐬦/g
    s/ṇ/𐬧/g
    s/m/𐬨/g
    s/ẏ/𐬫/g
    s/y/𐬪/g
    s/v/𐬬/g
    s/r/𐬭/g
    s/s/𐬯/g
    s/z/𐬰/g
    s/š/𐬱/g
    s/ž/𐬲/g
    s/h/𐬵/g
  '';
in
# Reads Latin-script Avestan on stdin (or from file arguments, which are
# forwarded to sed) and writes Avestan-script Unicode to stdout.
writers.writeDashBin "avesta" ''
  exec ${gnused}/bin/sed -f ${sedScript} "$@"
''
|
||||
53
packages/bvg.nix
Normal file
53
packages/bvg.nix
Normal file
@@ -0,0 +1,53 @@
|
||||
# Berlin BVG transit disruption checker
{
  writers,
  curl,
  jq,
}:
# Queries BVG's GraphQL disruption endpoint and prints, as a JSON array,
# the current disruption reports of type "Traffic" whose `linie` is one of
# the whitespace-separated lines in $interesting.
#
# NOTE(review): the --data-raw payload embeds literal newlines inside a JSON
# string value, which is not strictly valid JSON — the endpoint apparently
# tolerates it, but confirm before reusing this payload elsewhere.
writers.writeDashBin "bvg" ''
  interesting="U6 N6 140 M46 184 N84"

  ${curl}/bin/curl -sSL 'https://www.bvg.de/disruption-reports/q' \
    --data-raw '{"variables":{},"query":"{
      allDisruptions {
        disruptions {
          meldungsId
          linie
          verkehrsmittel
          __typename
          ... on Traffic {
            datum
            gueltigVonDatum
            gueltigVonZeit
            gueltigBisDatum
            gueltigBisZeit
            richtungName
            richtungHafasId
            beginnAbschnittName
            beginnAbschnittHafasId
            endeAbschnittName
            endeAbschnittHafasId
            textIntUrsache
            sev
            textIntAuswirkung
            umfahrung
            textWAPSMSUrsache
            textWAPSMSAuswirkung
            prioritaet
            __typename
          }
        }
        __typename
      }
    }"}' \
    | ${jq}/bin/jq --arg interesting "$interesting" '
        .data.allDisruptions.disruptions
        | map(select(
            (.linie as $linie
              | $interesting
              | split(" ")
              | index($linie))
            and (.["__typename"] == "Traffic")
          ))
      '
''
|
||||
17
packages/charinfo.nix
Normal file
17
packages/charinfo.nix
Normal file
@@ -0,0 +1,17 @@
|
||||
# Print Unicode character info for each character on stdin
{
  writers,
  python3,
}:
# For every character read from stdin (leading/trailing whitespace stripped),
# print its position, the character, its code point in hex, and — when the
# code point has them — its Unicode category and name.
writers.writePython3Bin "charinfo" {
  flakeIgnore = [ "E501" "E722" ];
} ''
  import sys
  import unicodedata

  text = sys.stdin.read().strip()
  for position, char in enumerate(text):
      base = (position, char, hex(ord(char)))
      try:
          # unicodedata.name raises ValueError for unnamed code points;
          # in that case fall back to the bare position/char/hex triple.
          print(*base, unicodedata.category(char), unicodedata.name(char))
      except Exception:
          print(*base)
''
|
||||
30
packages/chunk-pdf.nix
Normal file
30
packages/chunk-pdf.nix
Normal file
@@ -0,0 +1,30 @@
|
||||
# Split a PDF into chunks of N pages
{
  writers,
  pdftk,
  gnugrep,
  coreutils,
}:
# Usage: chunk-pdf CHUNK_SIZE FILE.pdf
# Writes chunk_1.pdf, chunk_2.pdf, ... into the current directory.
writers.writeDashBin "chunk-pdf" ''
  set -efu

  INPUT_FILE="''${2:?Pass the PDF path as second argument.}"
  PAGES_PER_REPORT="''${1:?Pass the chunk size as first argument.}"

  if [ ! -f "$INPUT_FILE" ]; then
    echo >&2 "File $INPUT_FILE does not exist."
    exit 1
  fi

  TOTAL_PAGES="$(${pdftk}/bin/pdftk "$INPUT_FILE" dump_data | ${gnugrep}/bin/grep NumberOfPages | ${coreutils}/bin/cut -f2 -d' ')"

  # Ceiling division, so a trailing partial chunk is emitted too.
  # (Floor division silently dropped the last TOTAL_PAGES % CHUNK pages,
  # and produced no output at all when TOTAL_PAGES < CHUNK.)
  RUNS=$(((TOTAL_PAGES + PAGES_PER_REPORT - 1) / PAGES_PER_REPORT))

  for run in $(${coreutils}/bin/seq 0 "$((RUNS-1))"); do
    start_page=$((run*PAGES_PER_REPORT+1))
    end_page=$(((run+1)*PAGES_PER_REPORT))
    # Clamp the final chunk to the document's last page.
    if [ "$end_page" -gt "$TOTAL_PAGES" ]; then
      end_page=$TOTAL_PAGES
    fi
    output_file="chunk_$((run+1)).pdf"
    echo "splitting $INPUT_FILE from $start_page to $end_page into $output_file"
    ${pdftk}/bin/pdftk "$INPUT_FILE" cat "$start_page-$end_page" output "$output_file"
  done
''
|
||||
21
packages/csv2json.nix
Normal file
21
packages/csv2json.nix
Normal file
@@ -0,0 +1,21 @@
|
||||
# Convert CSV to JSON
{
  writers,
  python3,
}:
# Reads CSV on stdin and writes a JSON array of row objects (keyed by the
# CSV header) to stdout. -d/--delimiter selects the field separator.
writers.writePython3Bin "csv2json" {
  flakeIgnore = [ "E501" ];
} ''
  import csv
  import json
  import sys
  import argparse

  # Argument parsing lives under the __main__ guard (it previously ran at
  # module level, unlike the sibling json2csv script).
  if __name__ == "__main__":
      parser = argparse.ArgumentParser()
      parser.add_argument("--delimiter", "-d", default=",", help="CSV field separator")
      args = parser.parse_args()

      json.dump(list(csv.DictReader(sys.stdin, delimiter=args.delimiter)), sys.stdout)
''
|
||||
34
packages/fix-sd.nix
Normal file
34
packages/fix-sd.nix
Normal file
@@ -0,0 +1,34 @@
|
||||
# Recover files from a corrupted exFAT SD card
{
  writers,
  exfatprogs,
  util-linux,
  coreutils,
  findutils,
  gnused,
}:
# Usage: fix-sd /dev/sdX [output-dir]
# Runs fsck.exfat once to harvest the names of unallocated files from its
# diagnostics, mounts the device, copies any file with a matching basename
# into output-dir, then runs fsck.exfat again (interactive repair).
# Requires privileges to mount/umount the device.
#
# NOTE(review): the sed pattern uses `(.*?)` — lazy quantifiers are not part
# of POSIX ERE; GNU sed matches it greedily, which happens to work because
# of the trailing anchor, but confirm against actual fsck.exfat output.
# NOTE(review): files are matched by basename only, so same-named files in
# different directories all get copied (and overwrite each other) in
# $output_dir — presumably acceptable for photo recovery; verify.
writers.writeDashBin "fix-sd" ''
  set -efu

  drive="''${1:?Usage: fix-sd /dev/sdX [output-dir]}"
  output_dir="''${2:-$(${coreutils}/bin/mktemp -d "''${TMPDIR:-/tmp}/fix-sd-XXXXXX")}"
  mountpoint="$(${coreutils}/bin/mktemp -d "''${TMPDIR:-/tmp}/fix-sd-mount-XXXXXX")"

  trap clean EXIT
  clean() {
    ${util-linux}/bin/umount "$mountpoint" 2>/dev/null || true
    ${coreutils}/bin/rmdir "$mountpoint" 2>/dev/null || true
  }

  filenames="$(${exfatprogs}/bin/fsck.exfat "$drive" 2>&1 | ${gnused}/bin/sed -nE "s/.* file '(.*?)' is not allocated.*/\1/p")"
  ${coreutils}/bin/mkdir -p "$mountpoint" "$output_dir"
  ${util-linux}/bin/mount "$drive" "$mountpoint"

  echo "$filenames" | while read -r filename; do
    [ -n "$filename" ] || continue
    ${findutils}/bin/find "$mountpoint" -type f -name "$filename" -exec ${coreutils}/bin/cp {} "$output_dir" \;
  done

  echo "Recovered files saved to $output_dir"
  ${exfatprogs}/bin/fsck.exfat "$drive"
''
|
||||
32
packages/json2csv.nix
Normal file
32
packages/json2csv.nix
Normal file
@@ -0,0 +1,32 @@
|
||||
# Convert JSON array of objects to CSV
{
  writers,
  python3,
}:
# Reads a JSON array of flat objects on stdin and writes CSV to stdout.
# The header is the union of all object keys; objects missing a key get an
# empty field. Exits non-zero on non-array, empty-array, or non-dict input.
writers.writePython3Bin "json2csv" {
  flakeIgnore = [ "E501" ];
} ''
  import csv
  import json
  import sys

  if __name__ == "__main__":
      json_list = json.load(sys.stdin)
      if not isinstance(json_list, list):
          print("JSON object is not a list.", file=sys.stderr)
          sys.exit(1)
      if len(json_list) == 0:
          print("JSON list is empty.", file=sys.stderr)
          sys.exit(1)
      keys = set()
      for element in json_list:
          if isinstance(element, dict):
              keys |= element.keys()
          else:
              print("Non-dict element:", element, file=sys.stderr)
              sys.exit(1)
      # Sort the union of keys: set iteration order varies between runs,
      # which previously made the CSV column order nondeterministic.
      writer = csv.DictWriter(sys.stdout, fieldnames=sorted(keys))
      writer.writeheader()
      for element in json_list:
          writer.writerow(element)
''
|
||||
89
packages/mp3player-write.nix
Normal file
89
packages/mp3player-write.nix
Normal file
@@ -0,0 +1,89 @@
|
||||
# Convert and transfer audio files to an MP3 player
{
  writers,
  ffmpeg,
  coreutils,
  gnugrep,
  gnused,
  bash,
}:
# Usage: mp3player-write [-s speed] MOUNT_POINT FILE1 [FILE2 ...]
# Re-encodes each input to low-bitrate mono MP3, split into 5-minute
# segments, and writes them to MOUNT_POINT with an unused digit prefix.
writers.writeBashBin "mp3player-write" ''
  set -e

  SPEED=1.0

  while getopts ":s:" opt; do
    case $opt in
      s) SPEED=$OPTARG ;;
      \?) echo "Invalid option: -$OPTARG" >&2; exit 1 ;;
      :) echo "Option -$OPTARG requires a value." >&2; exit 1 ;;
    esac
  done
  shift $((OPTIND -1))

  if [ "$#" -lt 2 ]; then
    echo "Usage: mp3player-write [-s speed] MOUNT_POINT FILE1 [FILE2 ...]"
    exit 1
  fi

  MOUNT_POINT=$1
  shift
  FILES=("$@")

  if [ ! -d "$MOUNT_POINT" ]; then
    echo "Error: Mount point '$MOUNT_POINT' does not exist."
    exit 1
  fi

  # Estimate required space; re-encoding to 32k mono roughly halves the
  # input size (heuristic only).
  TOTAL_SIZE=0
  for f in "''${FILES[@]}"; do
    if [ ! -f "$f" ]; then
      echo "Warning: File '$f' does not exist, skipping."
      continue
    fi
    FILE_SIZE=$(${coreutils}/bin/stat --printf="%s" "$f")
    TOTAL_SIZE=$((TOTAL_SIZE + FILE_SIZE / 2))
  done

  AVAILABLE=$(${coreutils}/bin/df --output=avail "$MOUNT_POINT" | ${coreutils}/bin/tail -n 1)
  AVAILABLE=$((AVAILABLE * 1024))

  if [ "$TOTAL_SIZE" -gt "$AVAILABLE" ]; then
    echo "Error: Not enough space. Required: $TOTAL_SIZE bytes, Available: $AVAILABLE bytes"
    exit 1
  fi

  echo "Enough space available. Starting conversion..."

  # Reduce a filename to a safe basename: extension stripped, spaces to
  # underscores, non-alphanumerics dropped, truncated to 50 chars.
  sanitize_filename() {
    local name
    name=$(${coreutils}/bin/basename "$1")
    name=''${name%.*}
    name=$(echo "$name" | ${coreutils}/bin/tr ' ' '_' | ${coreutils}/bin/tr -cd '[:alnum:]_-')
    echo "''${name:0:50}"
  }

  for f in "''${FILES[@]}"; do
    [ -f "$f" ] || continue

    # Pick the lowest digit 0-9 not yet used as a filename prefix on the
    # player. NB: sed ships in gnused — the previous coreutils path for
    # sed pointed at a binary that does not exist.
    PREFIX=""
    existing_prefixes=$(${coreutils}/bin/ls "$MOUNT_POINT" | ${gnugrep}/bin/grep -E '^[0-9].*\.mp3$' | ${gnused}/bin/sed -E 's/^([0-9]).*/\1/' | ${coreutils}/bin/sort -n | ${coreutils}/bin/uniq)
    for i in {0..9}; do
      if ! echo "$existing_prefixes" | ${gnugrep}/bin/grep -q "^$i$"; then
        PREFIX=$i
        break
      fi
    done
    # Fail loudly instead of writing with an empty prefix when all ten
    # digits are taken (PREFIX was previously left unset in that case).
    if [ -z "$PREFIX" ]; then
      echo "Error: No free prefix digit (0-9) left on '$MOUNT_POINT'." >&2
      exit 1
    fi

    BASENAME=$(sanitize_filename "$f")
    OUT_PATTERN="$MOUNT_POINT/''${PREFIX}_%03d_''${BASENAME}.mp3"

    echo "Converting '$f' to '$OUT_PATTERN' at speed $SPEED..."

    ${ffmpeg}/bin/ffmpeg -nostdin -i "$f" \
      -filter:a "atempo=$SPEED" \
      -ar 22050 -ac 1 -c:a libmp3lame -b:a 32k \
      -f segment -segment_time 300 \
      "$OUT_PATTERN"
  done

  echo "All files processed successfully."
''
|
||||
13
packages/mushakkil.nix
Normal file
13
packages/mushakkil.nix
Normal file
@@ -0,0 +1,13 @@
|
||||
# Add Arabic diacritics (tashkeel) to text via alsharekh.org
{
  writers,
  curl,
  jq,
}:
# Reads text on stdin, wraps it as {"word": ..., "type": 1}, posts it to
# the alsharekh.org diacritization API, and prints the diacritized result.
#
# NOTE(review): `jq --raw-input` without --slurp emits one JSON object per
# stdin line, so multi-line input yields several concatenated JSON bodies
# inside --data-raw — presumably only single-line input is supported;
# confirm before relying on multi-line behaviour.
writers.writeDashBin "mushakkil" ''
  ${curl}/bin/curl -sSL 'https://diac.alsharekh.org/Diac/DiacText' \
    -H "Content-Type: application/json" \
    --data-raw "$(${jq}/bin/jq --raw-input '{word: ., type: 1}')" \
    --compressed \
    | ${jq}/bin/jq -r .diacWord
''
|
||||
84
packages/nix-haddock-index.nix
Normal file
84
packages/nix-haddock-index.nix
Normal file
@@ -0,0 +1,84 @@
|
||||
# Generate a Haddock index page for all packages visible to the current GHC
{
  writers,
  coreutils,
  gnugrep,
  gnused,
  graphviz,
  bash,
}:
# Finds the Nix-store GHC on PATH (or honours $NIX_GHC), then writes
#   /tmp/haddock-<storehash>-index.html  — links to every package's haddocks
#   /tmp/haddock-<storehash>.svg         — transitively-reduced dep graph
# and prints the index file's path.
#
# How the index is built: `ghc-pkg dump` output is rewritten by sed into a
# small shell program (calls to reset / name=… / version=… / haddock_html …)
# which is eval'd into a gen_index function; the helper functions below
# (reset, haddock_html, html_head, html_foot) are its vocabulary. This
# eval-based generation is intentionally left byte-identical — it is highly
# sensitive to the exact sed output format.
writers.writeBashBin "nix-haddock-index" ''
  set -efux

  if test -z "''${NIX_GHC-}"; then
    NIX_GHC=$(${coreutils}/bin/readlink -f "$(type -P ghc)")
  fi

  if ! echo $NIX_GHC | ${gnugrep}/bin/grep -q '^/nix/store/'; then
    printf '%s: error: unsupported GHC executable path (not in Nix store): %q\n' \
      "$0" \
      "$NIX_GHC" \
      >&2
    exit 1
  fi

  NIX_GHC_PREFIX=$(${coreutils}/bin/dirname "$(${coreutils}/bin/dirname "$NIX_GHC")")
  NIX_GHC_DOCDIR=$NIX_GHC_PREFIX/share/doc/ghc/html

  main() {
    hash=$(echo $NIX_GHC_PREFIX | ${gnused}/bin/sed -n 's|^/nix/store/\([a-z0-9]\+\).*|\1|p')
    title="Haddock index for $NIX_GHC_PREFIX"

    header=$(
      printf 'Haddock index for <a href="%s">%s</a>\n' \
        $NIX_GHC_PREFIX \
        $NIX_GHC_PREFIX \
    )

    suffix=''${hash:+-$hash}
    index_file=/tmp/haddock$suffix-index.html
    svg_file=/tmp/haddock$suffix.svg

    eval "$(
      echo 'gen_index() {'
      echo ' html_head'
      "$NIX_GHC_PREFIX"/bin/ghc-pkg dump | ${gnused}/bin/sed -n '
        s/^---$/ reset/p
        s/^\(name\|version\):\s*\([-A-Za-z0-9_.]\+\)$/ \1=\2/p
        s/^haddock-html:\s*\([-A-Za-z0-9_./]\+\)$/ haddock_html \1/p
      '
      echo ' html_foot'
      echo '}'
    )"

    gen_index > $index_file

    "$NIX_GHC_PREFIX"/bin/ghc-pkg dot | ${graphviz}/bin/tred | ${graphviz}/bin/dot -Tsvg | ${gnused}/bin/sed '
      s/<svg width="[0-9]\+pt" height="[0-9]\+pt"/<svg width="3600px" height="100%"/
    ' > $svg_file

    echo $index_file
  }
  reset() {
    unset name version
  }
  haddock_html() {
    printf '<li>'
    printf '<a href="%s/index.html">%s</a>' "$1" "$name-$version"
    printf '</li>\n'
  }
  html_head() {
    printf '<!doctype html>\n'
    printf '<title>%s</title>\n' "$title"
    printf '<link href="%s" rel="stylesheet" type="text/css">\n' \
      "$NIX_GHC_DOCDIR/libraries/ocean.css"
    printf '<h1>%s</h1>\n' "$header"
    printf '<ul>\n'
  }
  html_foot() {
    printf '</ul>\n'
    printf '<a href="%s">graph</a>\n' "$svg_file"
  }

  main "$@"
''
|
||||
29
packages/pdf-ocr.nix
Normal file
29
packages/pdf-ocr.nix
Normal file
@@ -0,0 +1,29 @@
|
||||
# OCR a PDF file to text using tesseract
{
  writers,
  poppler_utils,
  tesseract,
  coreutils,
}:
# Usage: pdf-ocr FILE.pdf
# Renders every page to PNG in a temp dir, OCRs each page, and prints the
# concatenated text to stdout.
writers.writeDashBin "pdf-ocr" ''
  set -efu

  # Validate the argument BEFORE calling realpath: with set -u a missing
  # argument previously aborted inside realpath with an opaque error
  # instead of printing the usage message.
  if [ "$#" -ne 1 ] || [ ! -f "$1" ]; then
    echo "Usage: pdf-ocr FILE.pdf" >&2
    exit 1
  fi
  pdf_path="$(${coreutils}/bin/realpath "$1")"

  tmpdir="$(${coreutils}/bin/mktemp -d)"
  trap 'rm -rf "$tmpdir"' EXIT

  cd "$tmpdir"

  ${poppler_utils}/bin/pdftoppm -png "$pdf_path" pdf-ocr
  for png in pdf-ocr*.png; do
    # tesseract appends ".txt" to the given output base itself, so pass
    # the PNG name as the base (avoids the previous double ".txt.txt").
    ${tesseract}/bin/tesseract "$png" "$png" 2>/dev/null
  done

  cat pdf-ocr-*.txt
''
|
||||
77
packages/prospekte.nix
Normal file
77
packages/prospekte.nix
Normal file
@@ -0,0 +1,77 @@
|
||||
# Browse and view German supermarket flyers (Lidl, Aldi, REWE, Kaufland, Netto)
{
  writers,
  curl,
  jq,
  fzf,
  zathura,
  coreutils,
  htmlq,
  gnugrep,
  gnused,
  lib,
}:
# Each helper prints a store banner line followed by one flyer PDF URL per
# line; the combined list is offered in fzf, and the selected URL is
# downloaded to a temp dir and opened in zathura.
#
# NOTE(review): rewe_berlin pipes the raw brochures JSON straight into
# `while read -r brochure_id` with no jq extraction, so each "id" is a line
# of JSON rather than an id (unlike the other helpers) — this looks broken;
# probably needs something like `| jq -r '.[].id'`, but the endpoint's
# schema is unverified from here.
# NOTE(review): picking one of the banner lines (LIDL, REWE, ...) in fzf
# passes a non-URL to curl; presumably users only select URL lines.
writers.writeDashBin "prospekte" ''
  export PATH=${lib.makeBinPath [ curl jq fzf zathura coreutils htmlq gnugrep gnused ]}:$PATH

  lidl() {
    echo LIDL
    curl -sSL 'https://endpoints.lidl-flyer.com/v3/region-overview/lidl/de-DE/0.json' \
      | jq -r '
        .categories
        | map(select(.name == "Filial-Angebote") | .subcategories | map(.flyers))
        | flatten
        | flatten
        | .[]
        | .pdfUrl
      '
  }

  aldi_nord() {
    echo ALDI nord
    echo 'https://magazine.aldi-nord.de/aldi-nord/aldi-aktuell/GetPDF.ashx'
    echo 'https://magazine.aldi-nord.de/aldi-nord/aldi-vorschau/GetPDF.ashx'
  }

  rewe_berlin() {
    store_id=662366923
    publisher_id=1062

    echo REWE
    curl -sSL "https://www.bonialserviceswidget.de/de/stores/$store_id/brochures?storeId=$store_id&publisherId=$publisher_id" | while read -r brochure_id; do
      curl -sSL "https://www.bonialserviceswidget.de/de/v5/brochureDetails/$brochure_id?publisherId=$publisher_id" | jq -r .pdfUrl
    done
  }

  kaufland() {
    region_code=8920
    echo KAUFLAND
    curl -sSL https://filiale.kaufland.de/prospekte.html | htmlq --attribute href '.flyer a' | grep -Eo 'DE_de_KDZ[^/]*' | sed "s/_3000_/_''${region_code}_/" | while read -r flyer_id; do
      curl -sSL "https://endpoints.leaflets.kaufland.com/v3/$flyer_id/flyer.json?regionCode=$region_code" | jq -r .flyer.pdfUrl
    done
  }

  netto_schwarz() {
    echo 'NETTO (schwarz)'
    curl -sSL 'https://squid-api.tjek.com/v2/catalogs?dealer_ids=90f2VL&order_by=created' \
      | jq -r '.[] | .id' \
      | while read -r flyer_id; do
        curl -sSL "https://squid-api.tjek.com/v2/catalogs/$flyer_id/download" \
          | jq -r .pdf_url
      done
  }

  dir="$(mktemp -d)"
  trap 'rm -rf "$dir"' EXIT

  prospekt_url="$( (
    lidl
    aldi_nord
    rewe_berlin
    kaufland
    netto_schwarz
  ) | fzf)"

  curl -sSL "$prospekt_url" -o "$dir/prospekt.pdf"
  zathura "$dir/prospekt.pdf"
''
|
||||
12
packages/readme.nix
Normal file
12
packages/readme.nix
Normal file
@@ -0,0 +1,12 @@
|
||||
# Render a GitHub repo's README.md as a man page
{
  writers,
  curl,
  pandoc,
  man,
}:
# Usage: readme OWNER/REPO
writers.writeDashBin "readme" ''
  repo="$*"
  # Try the historical default branch first, then fall back to "main"
  # (the default for newer repos). `curl -f` makes a 404 fail instead of
  # piping GitHub's error page into pandoc.
  { ${curl}/bin/curl -fsSL "https://raw.githubusercontent.com/$repo/master/README.md" \
      || ${curl}/bin/curl -fsSL "https://raw.githubusercontent.com/$repo/main/README.md"; } \
    | ${pandoc}/bin/pandoc -f gfm -t man -s \
    | ${man}/bin/man -l -
''
|
||||
Reference in New Issue
Block a user