mirror of
https://github.com/kmein/niveum
synced 2026-03-16 10:11:08 +01:00
delete obsolete scripts from .bin/
Removed 27 scripts that are dead, obsolete, or no longer relevant: - work-specific: elm-publish-private, watson2fdf.sh - hardcoded creds/sessions: libib.sh, ttrss-unread - broken/bitrotted: proxies.sh, dummy-alert, playlist_entries.sh, screencap.sh - trivial/unused: toposort.nix, tuesday-1800, mud.sh, load.sh, calendars.sh - no longer used: anki-poem.sh, browser, candyman, horoscope.sh, lieferando.sh, space.py, notetags.sh, sample-pdf.sh, lit.awk, countdown, json2csv.jq - dead mail tooling: mail-current-part, mail-current-query-find-part-by-* Remaining scripts will be packaged properly.
This commit is contained in:
@@ -1,29 +0,0 @@
|
||||
#!/bin/sh
# anki-poem: turn a poem into tab-separated Anki-importable lines.
# Usage: anki-poem FILE
# For each non-empty line N of FILE, emit one TSV row:
#   <up to 3 preceding lines, htmlized> TAB <line N and its successor, htmlized>

file="${1?please supply a poetry file}"
[ -f "$file" ] || {
  echo "'$file' is no file" >&2
  exit 1
}

poem="$(mktemp)"
clean () {
  rm "$poem"
}
trap clean EXIT

# strip empty lines so stanza breaks do not produce blank cards
sed '/^$/d' "$file" > "$poem"

# join stdin lines with <br/>; head -c -5 drops the trailing "<br/>" (GNU head)
htmlize() {
  awk 'ORS="<br/>"' \
    | head -c -5 # remove final <br/> characters
}

# read the count from stdin so wc emits no filename that needs parsing
line_count="$(wc -l < "$poem")"

for line_number in $(seq 1 "$line_count"); do
  if [ "$line_number" -gt 3 ]; then
    # context: the three lines preceding the current one
    sed -n "$((line_number - 3)),$((line_number - 1))p" "$poem"
  elif [ "$line_number" -gt 1 ]; then
    # near the top of the poem: all preceding lines
    sed -n "1,$((line_number - 1))p" "$poem"
  fi | htmlize
  # line 1 gets no context at all; the old unconditional "1,0p" range made
  # GNU sed print line 1 as its own context
  printf '\t'
  sed -n "${line_number},+1p" "$poem" | htmlize
  printf '\n'
done
|
||||
24
.bin/browser
24
.bin/browser
@@ -1,24 +0,0 @@
|
||||
#!/bin/sh
#
# Usage: browser
# pipe html to a browser
# e.g.
# $ echo '<h1>hi mom!</h1>' | browser
# $ ron -5 man/rip.5.ron | browser

# keep -e here rather than in the shebang, so "sh browser" behaves the same
set -e

if [ -t 0 ]; then
  # stdin is a terminal: open the argument directly, or print usage
  if [ -n "$1" ]; then
    open "$1"
  else
    cat <<usage
Usage: browser
pipe html to a browser
$ echo '<h1>hi mom!</h1>' | browser
$ ron -5 man/rip.5.ron | browser
usage
  fi
else
  # stdin is a pipe: spool the html to an unpredictable temp file
  # (the old /tmp/browser.$RANDOM.html name was guessable) and open it
  f="$(mktemp /tmp/browser.XXXXXX.html)"
  cat > "$f"
  xdg-open "$f"
fi
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/bin/sh
# Render an astrological calendar for the current year: one astrolog chart
# per month (Berlin), assembled into a single PDF under /tmp.

directory="$(mktemp -d)"
clean() {
  rm -rf "$directory"
}
trap clean EXIT

year=$(date +%Y)
output="/tmp/$year.pdf"

for month in $(seq 1 12); do
  # progress indicator on stderr, overwriting itself
  printf "\r%d" "$month" 1>&2
  astrolog -zN Berlin -qm "$month" "$year" -X -K -XA -Xr -Xm -Xb -Xo "$(printf "%s/%02d.bmp" "$directory" "$month")" -Xw 1080 720 2>/dev/null
done
printf "\r"

# let the shell expand the glob instead of relying on ImageMagick's
# internal wildcard handling for the quoted "$directory/*.bmp"
convert "$directory"/*.bmp "$output"
echo "$output"
|
||||
@@ -1,25 +0,0 @@
|
||||
#!/bin/sh
# Ask the "candyman" IRC bot (via the news.r HTTP API) to add a feed.
# Usage: candyman add-{reddit,telegram,youtube,twitch,twitter} NAME
set -efu

usage() {
  echo >&2 "$0 add-{reddit,telegram,youtube,twitch,twitter} NAME"
  exit 1
}

# Send "candyman: <cmd> <name>" as a PRIVMSG to #all.
# Build the JSON with jq --arg instead of string interpolation, so a NAME
# containing quotes/backslashes cannot break (or inject into) the payload.
candyman() {
  curl -fsSv http://news.r/api -H content-type:application/json -d "$(
    jq -n --arg msg "candyman: $1 $2" '
      {
        command: "PRIVMSG",
        params: ["#all", $msg]
      }
    '
  )"
}

[ $# -ge 2 ] || usage

case "$1" in
  add-reddit|add-telegram|add-youtube|add-twitter|add-twitch)
    candyman "$@"
    ;;
  *) usage;;
esac
|
||||
@@ -1,13 +0,0 @@
|
||||
#!/usr/bin/env -S awk -f
# Stopwatch: continuously display the elapsed time since the script started
# as MM:SS.ss, redrawing the same terminal line (trailing \r, no newline).

# Read /proc/uptime and return the raw line ("<uptime-secs> <idle-secs>").
# Callers use the result in arithmetic; awk's string-to-number coercion
# takes only the leading number, so the idle field is effectively ignored.
function z() {
  getline < "/proc/uptime"
  close("/proc/uptime")
  return $0
}
BEGIN {
  # x: uptime at start; y - x below is the elapsed time in seconds
  x = z()
  # deliberate busy loop: refreshes as fast as possible and pins a core
  while (1) {
    y = z()
    printf "%02d:%05.2f\r", (y - x) / 60, (y - x) % 60
  }
}
|
||||
@@ -1,43 +0,0 @@
|
||||
#!/bin/bash
# Fire a dummy alert at a local Alertmanager, then resolve it when the user
# presses enter.  Useful for testing alert routing/notification config.

name=$RANDOM
url='http://localhost:9093/api/v1/alerts'

# post_alert STATUS — send one alert (labels fixed, status as given) to
# Alertmanager.  Replaces the two copy-pasted curl payloads that differed
# only in the "status" field.
post_alert() {
  curl -XPOST "$url" -d "[{
    \"status\": \"$1\",
    \"labels\": {
      \"alertname\": \"$name\",
      \"service\": \"my-service\",
      \"severity\":\"warning\",
      \"instance\": \"$name.example.net\"
    },
    \"annotations\": {
      \"summary\": \"High latency is high!\"
    },
    \"generatorURL\": \"http://prometheus.int.example.net/<generating_expression>\"
  }]"
}

echo "firing up alert $name"
post_alert firing
echo ""

echo "press enter to resolve alert"
read -r

echo "sending resolve"
post_alert resolved
echo ""
|
||||
@@ -1,7 +0,0 @@
|
||||
#! /usr/bin/env nix-shell
#! nix-shell -p "(import <nixpkgs> { overlays = [ (import ~/work/fysiweb/engiadina-pwa/devops/pkgs) ]; }).elm-publish-private"
#! nix-shell -i bash
# Wrapper: run the work-overlay's elm-publish-private inside a nix-shell.
# NOTE(review): hard-codes a checkout path under ~/work — only works on
# machines that have that repository at exactly that location.

set -efux

exec elm-publish-private "$@"
|
||||
@@ -1,34 +0,0 @@
|
||||
#!/bin/sh
# Fetch an astrological chart (PDF) for a given unix TIMESTAMP from
# edifyingfellowship.org's web form and open it in zathura.
set -efu

# coordinate presets for the curl form fields below:
# Berlin: -d lodeg=13 -d lomin=22 -d losec=41 -d lodir=E -d ladeg=52 -d lamin=27 -d lasec=42 -d ladir=N -d usecoords=1 \
# Kassel: -d lodeg=9 -d lomin=32 -d losec=5 -d lodir=E -d ladeg=51 -d lamin=18 -d lasec=17 -d ladir=N -d usecoords=1 \

[ $# -eq 1 ] || {
  echo >&2 Usage: "$0" TIMESTAMP
  exit 1
}

# interpret the timestamp in UTC and tell the form so (ybtz below)
export TZ=UTC

chart_path="$(mktemp /tmp/chart_XXX.pdf)"

timestamp="$1"

# split the unix timestamp into the date/time fields the form expects
year="$(date -d "@$timestamp" +%Y)"
month="$(date -d "@$timestamp" +%m)"
day="$(date -d "@$timestamp" +%d)"
hour="$(date -d "@$timestamp" +%H)"
minute="$(date -d "@$timestamp" +%M)"

# currently hard-coded to the Kassel coordinates (see presets above);
# options[] select chart style and the bodies/points to draw
curl -sSL 'https://edifyingfellowship.org/astro/' \
  -d lodeg=9 -d lomin=32 -d losec=5 -d lodir=E -d ladeg=51 -d lamin=18 -d lasec=17 -d ladir=N -d usecoords=1 \
  -d ybyr="$year" -d ybmo="$month" -d ybdy="$day" -d ybhr="$hour" -d ybmi="$minute" -d ybsc=0 -d ybtz="$TZ" \
  -d currenttime=0 \
  -d title="$timestamp" \
  -d options[]=VancouverWheel -d options[]=Arrow -d options[]=XBold -d options[]=HouseLabels -d options[]=Placidus \
  -d options[]=Sun -d options[]=Moon -d options[]=Mercury -d options[]=Venus -d options[]=Mars -d options[]=Jupiter -d options[]=Saturn -d options[]=Uranus -d options[]=Neptune -d options[]=Pluto -d options[]=Ascendant -d options[]=MC -d options[]=Lilith -d options[]=MeanNode -d options[]=TrueNode \
  -d aspectpct=100 -d format=PDF -d Submit= -o "$chart_path"

zathura "$chart_path"
|
||||
@@ -1,5 +0,0 @@
|
||||
#!/usr/bin/env -S jq -r -f
# json2csv: convert an array of flat JSON objects to CSV.
# The header row is the union of all keys across objects; objects missing a
# key get an empty cell (null in $row[.] renders as an empty CSV field).
(map(keys) | add | unique) as $cols
# for each object, pick its values in $cols order
| map(. as $row | $cols | map($row[.])) as $rows
| $cols, $rows[]
| @csv
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/usr/bin/env bash
# Export all libib.com libraries as CSV files into the current directory.
#
# SECURITY NOTE(review): the fallback below is a hard-coded PHP session id
# committed to the repo; it is almost certainly expired.  Supply a fresh
# one via the LIBIB_SESSION_ID environment variable.
session_id=${LIBIB_SESSION_ID:-7b638c194d9bda74f80043045018cc9e}

# library display name -> libib export id
declare -A libraries

libraries["Literatur"]=344428
libraries["Sprache"]=344160
libraries["Miscellanea"]=344427
libraries["Wissenschaft"]=344429
libraries["Relicta"]=565920

# quote the key expansion so library names with spaces stay one word
for library in "${!libraries[@]}"
do
  curl -sSL 'https://www.libib.com/library/functions/csv-export.php' -H "Cookie: PHPSESSID=$session_id" -d export="${libraries[$library]}" > "$library.csv"
done
|
||||
@@ -1,81 +0,0 @@
|
||||
#!/bin/sh
# Interactive Lieferando order helper for a German postcode (PLZ):
#   1. scrape the restaurant list for the PLZ (cached under /tmp/lieferando),
#   2. pick cuisine categories and then a restaurant with fzf,
#   3. pick menu items with fzf and print them with a price total.
set -efu

# the single argument must look like a 5-digit postcode
if echo "$1" | grep -Eq '[[:digit:]]{5}'; then
  PLZ="$1"
else
  echo >&2 "Usage: $0 PLZ"
  exit 1
fi

lieferando_dir=/tmp/lieferando
mkdir -p "$lieferando_dir/$PLZ"

# Print the restaurant list for $PLZ as JSON, scraping and caching on miss.
# NOTE(review): scrapes a `var restaurants = [...]` literal out of the page
# source; the numeric indices (.[30], .[19], .[10], .[14]) mirror that
# undocumented array layout and break whenever the site changes.
fetch_restaurants() {
  cache_path="$lieferando_dir/$PLZ.json"

  if [ -r "$cache_path" ]; then
    cat "$cache_path"
  else
    w3m -dump_source "http://www.lieferando.de/$PLZ" \
      | gunzip \
      | sed -n '/var restaurants/,/];$/p' \
      | sed 's/var restaurants =//;$s/;$//' \
      | prettier --parser=json \
      | jq '
      map({
      name: .[30] | .name,
      category: .[30] |.categories | split(", "),
      url: "http://lieferando.de\(.[30] | .url)",
      minutes: .[19],
      minimum: .[10],
      delivery: .[14]
      })' \
      | tee "$cache_path"
  fi
}

# Print the menu JSON for a restaurant URL ($1), scraping and caching on miss.
fetch_menu() {
  [ $# -eq 1 ] || exit 1

  # cache key: last path component of the restaurant URL
  slug="$(echo "$1" | sed 's!.*/!!')"
  cache_path="$lieferando_dir/$PLZ/$slug.json"

  if [ -r "$cache_path" ]; then
    cat "$cache_path"
  else
    w3m -dump_source "$1" \
      | gunzip \
      | sed -n '/var MenucardProducts/,/\];/p' \
      | sed 's/var MenucardProducts =//;s/;$//' \
      | jq -r '
      unique_by(.productId)
      | group_by(.categoryId)
      | flatten
      ' \
      | tee "$cache_path"
  fi
}

data="$(fetch_restaurants)"

# (disabled: pre-warm the menu cache for every restaurant up front)
# echo "$data" | jq -c '.[]' | while read -r restaurant; do
# fetch_menu "$(echo "$restaurant" | jq -r .url)"
# done

# multi-select cuisine categories from all categories seen in the data
selected_categories="$(echo "$data" | jq -r 'map(.category) | flatten | unique | .[]' | fzf -m)"

# restaurants matching at least one selected category; each fzf line carries
# the URL in trailing parentheses, which the final sed strips back out
selected_restaurant_url="$(echo "$selected_categories" | jq --argjson restaurants "$data" -sRr '
split("\n")[:-1] as $categories
| $restaurants[]
| select(.category - $categories != .category)
| "\(.name) [🚴\(.minutes)min 💰\(.minimum)€ + \(.delivery)€] (\(.url))"
' \
  | fzf \
  | sed 's/.*(//;s/)$//'
)"

# pick menu items ("price<TAB>name" lines) and sum the selected prices
fetch_menu "$selected_restaurant_url" \
  | jq -r '.[] | "\(.price)\t\(.name)"' \
  | fzf -m \
  | awk '{print $0; sum += $1} END {print "-----"; print sum}'
|
||||
17
.bin/lit.awk
17
.bin/lit.awk
@@ -1,17 +0,0 @@
|
||||
# lit.awk: invert a literate source file.  Lines between the `begin` and
# `end` markers pass through verbatim; everything else — including the
# marker lines themselves — is prefixed with the comment leader.
# All three markers are overridable: awk -v comment=… -v begin=… -v end=… .
BEGIN {
  if (!comment) comment = "--";
  if (!begin) begin = "\\begin{code}";
  if (!end) end = "\\end{code}";
}
$0 == begin { code = 1; print comment, $0; next }
$0 == end   { code = 0; print comment, $0; next }
code        { print; next }
            { print comment, $0 }
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/bin/sh
# Print the three load averages from uptime(1), space-separated.
# If the line does not contain "load average: " (e.g. a BSD uptime that says
# "load averages:"), sed passes the line through unchanged.
uptime | sed 's/.*load average: \(.*\), \(.*\), \(.*\)/\1 \2 \3/'
|
||||
@@ -1,3 +0,0 @@
|
||||
#! /bin/sh
# Print the MIME part currently selected in the mail client by asking its
# HTTP control API over the unix socket at /tmp/much.api.sock.
set -efu
exec curl -fSs --unix-socket /tmp/much.api.sock http://localhost/current/part
|
||||
@@ -1,27 +0,0 @@
|
||||
#! /bin/sh
# usage: mail-current-query-find-part-by-name NAME
# Locate the MIME part of the currently selected mail whose attachment
# filename is NAME and print that part via notmuch.
set -efu

name=$1

# notmuch query identifying the mail currently selected in the MUA
query=$(mail-current-query)
result=$(notmuch show --entire-thread=false --format=json "$query")

# walk the entire JSON tree; collect objects that carry both an id and a
# filename, then take the first one whose filename matches exactly.
# jq emits the literal string "null" when nothing matches.
part_id=$(printf %s "$result" | jq --arg name "$name" '
[
recurse |
select(type == "object") |
{ id, name: .filename } |
select(.id != null and .name != null)
] |
map(select(.name == $name))[0].id
')

if test "$part_id" = null; then
  printf 'error: could not find part with name %s\n' \
    "$name" \
    >&2
  exit 1
fi

exec notmuch show --part="$part_id" "$query"
|
||||
@@ -1,39 +0,0 @@
|
||||
#! /bin/sh
# usage: mail-current-query-find-part-by-type TYPE
# Locate the first MIME part of the currently selected mail whose
# content-type is TYPE and print that part via notmuch.
set -efu

type=$1

# notmuch query identifying the mail currently selected in the MUA
query=$(mail-current-query)
result=$(notmuch show --entire-thread=false --format=json "$query")

# walk the entire JSON tree; collect objects that carry both an id and a
# content-type, then take the first one matching TYPE.  The commented-out
# block is an earlier, non-recursive attempt kept for reference.
part_id=$(printf %s "$result" | jq --arg type "$type" '
#flatten|map(select(.!=null))[0].body[0] |
#
#if .["content-type"] == $type then
# .id
#elif .["content-type"] | test("^multipart/") then
# .content|map(select(.["content-type"]==$type))[0].id
#else
# null
#end

[
recurse |
select(type == "object") |
{ id, type: .["content-type"] } |
select(.id != null and .type != null)
] |
map(select(.type == $type))[0].id
')

if test "$part_id" = null; then
  printf 'error: could not find part with type %s\n' \
    "$type" \
    >&2
  exit 1
fi

exec notmuch show --part="$part_id" "$query"
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
# Connect to the shared MUD on hotdog.r, forwarding the local username as
# the in-game nickname (runs `mud` remotely with a forced tty).
ssh mud@hotdog.r -t "MUD_NICKNAME=$LOGNAME mud"
|
||||
@@ -1,15 +0,0 @@
|
||||
#!/bin/sh
# Build a vim-style `tags` file for $HOME/notes from "tags:" front-matter
# lines, so each tag jumps to the note(s) declaring it.

# inspired by https://github.com/connermcd/bin/blob/1d38cb98812906d8b95dc6e51e1149e29261617d/notetags

cd "$HOME/notes/" || exit

[ -f tags ] && rm tags
grep -r 'tags:' ./* | while read -r line; do
  # grep output is FILE:tags: VALUE — split on colons
  file=$(echo "$line" | cut -d: -f1)
  unparsed_tags=$(echo "$line" | cut -d: -f3) # text after "tags:"; NOTE(review): truncated if the tag value itself contains a colon
  # strip any remaining "tags:" prefix plus brackets and commas
  tags=$(echo "$unparsed_tags" | sed -e 's/tags: *//g' -e 's/[][,]//g')
  for tag in $tags; do
    # one tags entry per tag: NAME FILE EX-COMMAND
    # NOTE(review): the /^…$/ search pattern omits the "tags:" prefix of the
    # actual line in the file, so the jump address may never match — verify
    echo "$tag $file /^$unparsed_tags$/;" >> tags
  done
done
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/bin/sh
# Print the URL of every entry in the given playlist(s).
# Pass arguments through individually ("$@") instead of joining them into a
# single string ("$*"), so multiple playlist URLs work; -i ignores per-entry
# errors, -j emits one JSON object per entry.
youtube-dl -ij "$@" | jq -sr '.[] | .webpage_url'
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/bin/sh
# Scrape netzwelt.de's free-proxy list and print it as a JSON array of
# {ip, port, country, security, protocol} objects.
# NOTE(review): depends on the page's exact HTML structure (.tblc selector,
# div.table.tbody.tr path, fixed cell order) — bitrots when the site changes.
curl -sSL https://www.netzwelt.de/proxy/index.html \
  | pup ".tblc" \
  | xml-to-json /dev/stdin \
  | jq '
  .div.table.tbody.tr
  | map(
  .td
  | {
  ip: .[0].a.value,
  port: .[1],
  country: .[2] | (if type == "string" then . else .a.value end),
  security: .[3],
  protocol: .[4]
  }
  )
  '
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/bin/sh
# Open a random page of a random PDF in zathura.
# Reads a newline-separated list of PDF paths on stdin, e.g.:
#   find ~/books -name '*.pdf' | sample-pdf
# NOTE(review): assumes paths contain no newlines; hangs if stdin is a tty.

# shuf with no file argument samples from stdin
filepath="$(shuf --head-count=1)"
pages="$(pdfinfo "$filepath" | awk '/^Pages:/{print $2}')"
random_page="$(shuf --input-range="1-$pages" --head-count=1)"
zathura --page="$random_page" "$filepath"
|
||||
@@ -1,16 +0,0 @@
|
||||
#! /usr/bin/env nix-shell
#! nix-shell -i sh -p coreutils byzanz xorg.xwininfo gnused

# shellcheck shell=sh
# ref https://gist.github.com/aforemny/0994cb7f06ea30d56c8b9681ff5d2054
# Record a screencast of an interactively chosen window with byzanz:
# click a window after starting, stop recording with Ctrl-C.
# Remaining arguments (e.g. the output file) are passed to byzanz-record.

set -eux

# parse the chosen window's geometry out of xwininfo into x/y/w/h
eval "$(xwininfo | \
  sed -n -e 's/^ \+Absolute upper-left X: \+\([0-9]\+\).*/x=\1/p' \
  -e 's/^ \+Absolute upper-left Y: \+\([0-9]\+\).*/y=\1/p' \
  -e 's/^ \+Width: \+\([0-9]\+\).*/w=\1/p' \
  -e 's/^ \+Height: \+\([0-9]\+\).*/h=\1/p')"

# byzanz-record runs until its -e command exits; Ctrl-C kills the sleeper
# and thereby ends the recording.  Quote the eval'd geometry variables so
# the command line stays well-formed even if a sed pattern failed to match.
trap "pkill -f 'sleep 360d'" INT
byzanz-record -e "sleep 360d" -c -x "$x" -y "$y" -w "$w" -h "$h" "$@"
|
||||
@@ -1,49 +0,0 @@
|
||||
import ephem
from datetime import datetime, date, timedelta

now = datetime.now()
limit = now + timedelta(days=365)


def events_until(limit):
    """Collect upcoming astronomical events up to roughly ``limit``.

    Returns a dict mapping ephem.Date -> event label.  Each search may
    overshoot ``limit`` by one event (the check happens before advancing),
    so the caller filters with a strict ``< limit`` comparison.
    """
    initial_date = ephem.Date(datetime.now())
    events = {}

    # (successor function, label) pairs — replaces six copy-pasted loops
    searches = [
        (ephem.next_full_moon, "🌕"),
        (ephem.next_new_moon, "🌑"),
        (ephem.next_vernal_equinox, "spring equinox"),
        (ephem.next_autumnal_equinox, "fall equinox"),
        (ephem.next_winter_solstice, "winter solstice"),
        (ephem.next_summer_solstice, "summer solstice"),
    ]
    for next_event, label in searches:
        current = initial_date
        while ephem.localtime(current) <= limit:
            current = next_event(current)
            events[current] = label
    return events


events = events_until(limit)


# renamed from `date` to avoid shadowing the datetime.date import
for event_date, event in sorted(events.items(), key=lambda x: x[0]):
    if ephem.localtime(event_date) < limit:
        print(ephem.localtime(event_date), event)
|
||||
@@ -1,81 +0,0 @@
|
||||
# Topological sort of a small hard-coded storage dependency graph, written
# as a pure nix expression.  Evaluate the `output` attribute for the
# sorted vertex list (e.g. nix-instantiate --eval --strict -A output).
let
  lib = import <nixpkgs/lib>;
in
rec {
  inherit lib;

  # directed edges { x, y } meaning "x depends on y"; vertices are string
  # lists like [ "pool" "zfs" ]
  input = [
    {
      x = [
        "pool"
        "zfs"
      ];
      y = [
        "mdadm"
        "raid1"
      ];
    }
    {
      x = [
        "pool"
        "zfs"
      ];
      y = [
        "disk"
        "sda"
      ];
    }
    {
      x = [
        "mdadm"
        "raid1"
      ];
      y = [
        "disk"
        "sdb"
      ];
    }
    {
      x = [
        "mdadm"
        "raid1"
      ];
      y = [
        "disk"
        "sdc"
      ];
    }
  ];

  # direct successors of `node`: every y reachable over one edge from node
  outNodes = node: graph: lib.unique (builtins.map (e: e.y) (builtins.filter (v: v.x == node) graph));

  # every vertex that appears on either side of any edge
  vertices = graph: lib.unique (builtins.map (x: x.y) graph ++ builtins.map (x: x.x) graph);

  # remove `node` together with every edge touching it
  deleteVertex = node: graph: (builtins.filter (v: v.x != node && v.y != node) graph);

  # a vertex with no outgoing edges; if none exists the graph is cyclic,
  # so trace the offending graph and abort evaluation
  findSink =
    graph:
    lib.findFirst (v: outNodes v graph == [ ]) (lib.trace graph (builtins.abort "No sink found")) (
      vertices graph
    );

  # dependency-first order: repeatedly emit a sink and delete it from the
  # graph; the single-edge base case emits dependency before dependent
  topSort =
    graph:
    if graph == [ ] then
      [ ]
    else if builtins.length graph == 1 then
      let
        only = builtins.head graph;
      in
      [
        only.y
        only.x
      ]
    else
      let
        sink = findSink graph;
      in
      [ sink ] ++ topSort (deleteVertex sink graph);

  output = topSort input;
}
|
||||
@@ -1,18 +0,0 @@
|
||||
#!/bin/sh
# Print the unread-article counter from a tt-rss instance via its JSON API.
# (The previous first line was a mangled "deno run" shebang pasted onto
# what is plainly a shell script.)
set -x
session_cache="$HOME/.cache/tt-rss.session"
ttrss_endpoint=https://feed.kmein.de/api/
ttrss_user=k
ttrss_password=$(pass shared/tt-rss/password)
# SECURITY NOTE(review): with `set -x` enabled, the login curl below traces
# the password to stderr; drop -x or wrap the login in set +x if that
# matters on this machine.

# Reuse a cached session id when present, otherwise log in and cache it.
# NOTE(review): an expired cached session is never detected or refreshed
# here — delete $session_cache manually to force a re-login.
login() {
  if [ -f "$session_cache" ]; then
    session_id="$(cat "$session_cache")"
  else
    session_id="$(curl -d '{"op":"login","user":"'"$ttrss_user"'","password":"'"$ttrss_password"'"}' "$ttrss_endpoint" | jq -r .content.session_id)"
    echo "$session_id" > "$session_cache"
  fi
}

login
curl -d '{"sid":"'"$session_id"'","op":"getUnread"}' "$ttrss_endpoint" | jq .content
|
||||
@@ -1,16 +0,0 @@
|
||||
#!/bin/sh
# Pick a random fast-food place or restaurant within $radius metres of
# OSM node 260050809 (the anchor, named .cbase) via the Overpass API.
set -efux
expected_max_results=1024 # the upper bound on the number of restaurants
radius=250

# Overpass QL: anchor node, then all amenity nodes around it.
# $radius is spliced into the single-quoted query on purpose (it is a
# trusted, numeric script constant).
echo '[out:json];node(id:260050809)->.cbase;
(
node(around.cbase:'$radius')[amenity=fast_food];
node(around.cbase:'$radius')[amenity=restaurant];
);out;' \
  | curl -sSL -d @- -X POST http://overpass-api.de/api/interpreter \
  | jq --argjson random "$(shuf -i 0-$expected_max_results -n 1)" '
  .elements
  | length as $length
  | .[$random % $length]
  '
|
||||
@@ -1,26 +0,0 @@
|
||||
# Summarise watson time-tracking data for $project, one JSON object per
# month of $year: hours per tag, with tags from the official-project list
# reported individually and everything else folded into ".other".
# NOTE(review): the original has no shebang — run with `sh watson2fdf.sh`.
project=Filli
year=2022

for month in Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec; do
  # month boundaries via GNU date's free-form -d parsing
  from="$(date +%F -d "$month 1, $year")"
  to="$(date +%F -d "$month 1, $year + 1 month")"
  watson report --json --from "$from" --to "$to" --project "$project"
done | jq --slurp '
def in_array($arr):
. as $value | any($arr[]; . == $value);

map(
["engadin-app","fysiweb","val-muestair","mia-engiadina","ol"] as $official_projects
| (.timespan.from | .[0:7]) as $timespan
| .projects | .[0]
| .time as $total_time
| .tags
| select(. != null)
| map(select(.name | in_array($official_projects)))
| (map(.time)|add) as $official_time
| map({key:.name, value:.time}) | from_entries
| .other |= ($total_time - $official_time)
| map_values(. / (60*60) | ceil)
| .month |= $timespan
)
'
|
||||
Reference in New Issue
Block a user