master
Kinch 2023-12-07 10:08:50 +01:00
commit a5a7812fc8
57 changed files with 1712 additions and 0 deletions

6
address.nu Normal file
View File

@ -0,0 +1,6 @@
# search the local nominatim-style geocoder for an address.
# All positional words are joined into one query string.
export def main [...query:string] {
    let query = $query | str join " "
    # url-encode the query so spaces and special characters survive the request
    # (the original interpolated the raw words, producing an invalid URL for
    # multi-word queries)
    http get $"http://lyssa:8888/search?q='($query | url encode)'&addressdetails=1"
    | reject licence boundingbox
    #| first | get address
}

3
ar.nu Normal file
View File

@ -0,0 +1,3 @@
# shadow the builtin `ls` with the external `als` tool for the given path
# NOTE(review): `als` is assumed to be an external command on PATH — confirm
def ls [path:string] {
    als $path
}

7
avocado-env.nu Normal file
View File

@ -0,0 +1,7 @@
# environment selector: point kube/helm tooling at the "avocado" cluster
export-env {
    load-env {
        KUBE_CONTEXT: "avocado"
        HELM_KUBECONTEXT: "avocado"
        XS_ENV_NAME: "AVOCADO(Incomplete)"
    }
}

13
aws-prod-env.nu Normal file
View File

@ -0,0 +1,13 @@
# environment selector for the AWS production cluster
export-env {
    load-env {
        MS_OCR_AUTHORITY: "https://otc-dev.aws.de.insiders.cloud/ocr/microsoft",
        FLAIR_AUTHORITY: "http://localhost:8082",
        XS_SF_URL: "https://api.production.de.insiders.cloud",
        XS_URL: "https://xs-backend.production.de.insiders.cloud",
        XS_SF_ACCOUNT: "xs.prod.paul",
        KUBE_CONTEXT: "aws-prod"
        HELM_KUBECONTEXT: "aws-prod"
        XS_ENV_NAME: "AWS_PROD"
        # NOTE(review): the "INFASTRUCTURE" misspelling is load-bearing —
        # aws.nu reads this exact variable name
        AWS_INFASTRUCTURE_ACCOUNT: "047349208615.dkr.ecr.eu-central-1.amazonaws.com"
    }
}

14
aws-test-env.nu Normal file
View File

@ -0,0 +1,14 @@
# environment selector for the AWS test cluster
export-env {
    load-env {
        MS_OCR_AUTHORITY: "https://otc-dev.aws.de.insiders.cloud/ocr/microsoft",
        FLAIR_AUTHORITY: "http://localhost:8082",
        XS_SF_URL: "https://api.test.de.insiders.cloud",
        XS_URL: "https://xs-backend.test.de.insiders.cloud",
        XS_SF_ACCOUNT: "xs.aws.test.tessi-admin",
        KUBE_CONTEXT: "dev-test"
        HELM_KUBECONTEXT: "dev-test"
        XS_ENV_NAME: "AWS_TEST"
        # NOTE(review): "INFASTRUCTURE" and "BOOSTRAP" misspellings are
        # load-bearing — other scripts read these exact names
        AWS_INFASTRUCTURE_ACCOUNT: "047349208615.dkr.ecr.eu-central-1.amazonaws.com"
        XS_KAFKA_BOOSTRAP_SERVER: "localhost:9092"
    }
}

66
aws.nu Normal file
View File

@ -0,0 +1,66 @@
# run the aws cli with the given arguments, capturing stdout
def call_aws [args:list<string>] {
    run-external --redirect-stdout "aws" $args
}
# completion: EC2 instance ids (described by their instance type)
def comp-instanceId [] {
    instances | each { |inst|
        {
            value: $inst.InstanceId,
            description: $inst.InstanceType
        }
    }
}
# log docker into the ECR registry referenced by AWS_INFASTRUCTURE_ACCOUNT
# (variable name keeps the existing spelling used by the *-env.nu modules)
export def login-into-docker [] {
    ^aws ecr get-login-password --region eu-central-1 | ^docker login --username AWS --password-stdin $env.AWS_INFASTRUCTURE_ACCOUNT
}
# SSO login followed by a docker registry login
export def login [] {
    call_aws ["sso", "login"]
    login-into-docker
}
# reboot an EC2 instance
export def "restart instance" [instanceId:string@comp-instanceId] {
    call_aws ["ec2", "reboot-instances", "--instance-ids", $instanceId]
}
# stop an EC2 instance
export def "stop instance" [instanceId:string@comp-instanceId] {
    call_aws ["ec2", "stop-instances", "--instance-ids", $instanceId]
}
# start an EC2 instance
export def "start instance" [instanceId:string@comp-instanceId] {
    call_aws ["ec2", "start-instances", "--instance-ids", $instanceId]
}
# list all cloudwatch alarms as {name, description}
export def "list alarm" [] {
    call_aws [cloudwatch describe-alarms] | from json | get MetricAlarms | select -i AlarmName AlarmDescription | rename name description
}
# completion: alarm names
def comp-alarm-name [] {
    list alarm | get name
}
# show the state-update history of one alarm, keeping only
# transitions into the ALARM state, oldest first
export def alarm [alarm:string@comp-alarm-name] {
    call_aws [cloudwatch describe-alarm-history --alarm-name $alarm] | from json
    | get AlarmHistoryItems
    | where HistoryItemType == StateUpdate
    # HistoryData is itself a json document
    | update HistoryData {|r| $r.HistoryData | from json}
    | flatten
    | update oldState {|r| $r.oldState | get stateValue}
    | update newState {|r| $r.newState | get stateValue}
    | update Timestamp { |r| $r.Timestamp | into datetime }
    | select AlarmName Timestamp HistorySummary oldState newState
    | rename name age summary old new
    | insert date {|r| $r.age | date to-timezone "Europe/Berlin" | date format }
    | where new == ALARM
    | reverse
}
# list all ec2 instances
export def instances [] {
    call_aws ["ec2", "describe-instances"] | from json
    | get Reservations | get Instances | flatten
    | select -i ImageId InstanceId InstanceType Architecture State KeyName PrivateIpAddress PublicIpAddress PrivateDnsName PublicDnsName Tags
    # State is a record; keep only its name
    | update State { |row| $row.State.Name }
}

7
backup.nu Normal file
View File

@ -0,0 +1,7 @@
# run restic against the external backup volume
# NOTE(review): the trailing "~/." argument backs up the whole home
# directory in addition to ~/.ssh — confirm that is intended
def call_restic [] {
    run-external "restic" "-r" "/Volumes/data/backup" "backup" ~/.ssh ~/.
}
# back up ~/.ssh and the home directory
export def main [] {
    call_restic
}

3
bd.nu Normal file
View File

@ -0,0 +1,3 @@
# TODO(review): stub — table comparison not implemented yet
export def compare-tables [other] {
}

29
bm.nu Normal file
View File

@ -0,0 +1,29 @@
# load the bookmark table, or an empty list when no bookmark file exists yet
export def load [] {
    # expand ~ explicitly: nushell does NOT tilde-expand quoted strings, so
    # the original `"~/.nu_bookmarks.csv" | path exists` check was always false
    let store = ("~/.nu_bookmarks.csv" | path expand)
    if ($store | path exists) {
        open $store
    } else {
        []
    }
}
# completion rows for bookmark names (value = name, description = path)
export def get-completion [] {
    load | rename description value
}
# persist the piped-in bookmark table, dropping duplicates
def save-bookmarks [] {
    # expand ~ explicitly: nushell does NOT tilde-expand quoted strings, so
    # the original wrote to a file literally named "~/.nu_bookmarks.csv"
    uniq | save -f ("~/.nu_bookmarks.csv" | path expand)
}
# bookmark the current directory under its basename
export def add [] {
    load | append {path: (pwd), name: (pwd | path basename)} | save-bookmarks
}
# resolve a bookmark name to its path (last entry wins on duplicates)
export def find [name:string@get-completion] {
    load | where name == $name | last | get path
}
# cd into a bookmarked directory
export def-env main [name:string@get-completion] {
    let dest = (load | where name == $name | last | get path)
    cd $dest
}

45
br.nu Normal file
View File

@ -0,0 +1,45 @@
# path of the sqlite cache holding brew formula metadata
def settings-db [] {
    $env.HOME | path join ".br.db"
}
# open the cache database
def read-settings [] {
    open (settings-db)
}
# completion: formula/cask names with their descriptions
def comp-name [] {
    read-settings | query db "SELECT * FROM formula" | select name desc | rename value description
}
# update the info about the formulars
export def update-cache [] {
    let formulars = (http get https://formulae.brew.sh/api/formula.json)
    let casks = (http get https://formulae.brew.sh/api/cask.json)
    # rebuild the cache from scratch; formulas and casks share one table
    rm -f (settings-db)
    $formulars | select name full_name desc | insert type "formula" | into sqlite --table_name "formula" (settings-db)
    $casks | select token full_token desc | rename name full_name desc | insert type "cask" | into sqlite --table_name "formula" (settings-db)
}
# search the cached formula table (substring match, or exact with --exact)
# NOTE(review): $pattern is interpolated into the SQL unescaped; fine for a
# local cache, but quotes in the pattern will break the query
export def search [pattern:string,--exact] {
    let settings = (read-settings)
    if $exact {
        $settings | query db $"SELECT * FROM formula WHERE name = '($pattern)'"
    } else {
        $settings | query db $"SELECT * FROM formula WHERE name LIKE '%($pattern)%'"
    }
}
# install a formula via brew
export def install [name:string@comp-name] {
    ^brew install $name
}
# list all installed apps
export def "list installed" [] {
    ["Caskroom", "Cellar"] | each { |x|
        ls ("/opt/homebrew" | path join $x) | get name | each {|x| $x | path basename}
    } | flatten
}
# update brew itself and upgrade all installed packages
export def up [] {
    ^brew update
    ^brew upgrade
}

5
cert.nu Normal file
View File

@ -0,0 +1,5 @@
# converts a PEM (PKCS#8) private key to a p12 keystore
# (the output file is written next to the input, with a .p12 extension)
export def "rsa private key to p12" [pem:path] {
    let destination = ($pem | path parse | update extension p12 | path join)
    ^openssl pkcs12 -export -nocerts -inkey $pem -out $destination
}

3
chores.nu Normal file
View File

@ -0,0 +1,3 @@
# TODO(review): stub — not implemented yet
export def "build-nodes size" [] {
}

37
completion.nu Normal file
View File

@ -0,0 +1,37 @@
# run the project-local ".nu" script in the current directory, forwarding args
export def run [...x: any] {
    let script = $"($env.PWD)/.nu"
    nu $script ...$x
}
# completion: just recipes (description = the recipe's parameter names)
export def "nu-complete just recipes" [] {
    ^just --unstable --unsorted --dump --dump-format json
    | from json
    | get recipes
    | transpose k v
    | each {|x|
        {
            value: $x.k,
            description: ( $x.v.parameters
            | each {|y| $y.name}
            | str join ' '
            )
        }
    }
}
# completion: argument hints for the recipe already typed on the command line
export def "nu-complete just args" [context: string, offset: int] {
    # second word of the line is the recipe name
    let r = ($context | split row ' ')
    ^just --unstable -u --dump --dump-format json
    | from json
    | get recipes
    | get ($r.1)
    | get body
    | each {|x| {description: ($x | get 0) }}
    | prepend ''
}
# wrap `just` with recipe/argument completion
export extern "just" [
    recipes?: string@"nu-complete just recipes"
    ...args: any@"nu-complete just args"
]

38
corpus.nu Normal file
View File

@ -0,0 +1,38 @@
# path of the sqlite db that stores the corpora
def settings-db [] {
    $env.HOME | path join ".corpus.db"
}
# append the piped-in records to the "documents" table
def save-settings [] {
    into sqlite (settings-db) --table_name "documents"
}
# run a query against the corpus db
def query-settings [query] {
    open (settings-db) | query db $query
}
# completion: corpus names
def comp-corpus [] {
    list | get corpus
}
# create a corpus
export def "add documents" [coprus:string, pattern:string] {
    glob $pattern | each {|x| {path: $x, corpus: $coprus}} | save-settings
}
# list all corpora
export def list [] {
    query-settings "SELECT DISTINCT corpus FROM documents"
}
# list all documents of a corpus
export def "show" [coprus:string@comp-corpus] {
    # filter by the given corpus (the original ignored the parameter and
    # returned the documents of ALL corpora)
    query-settings $"SELECT DISTINCT path FROM documents WHERE corpus = '($coprus)'"
}
# choose randomly a file from corpus
export def choose [corpus:string@comp-corpus, --amount:int=1] {
    # pass the corpus variable (the original passed the bareword `corpus`,
    # i.e. the literal string "corpus"); --amount now defaults to 1 so the
    # command works when the flag is omitted
    let documents = (show $corpus)
    seq 1 $amount | each {
        $documents | shuffle | first
    }
}

18
credm.nu Normal file
View File

@ -0,0 +1,18 @@
# list all credential entries (via the external credm tool)
export def entries [] {
    ^credm list-entries | from json
}
# list all stored tokens
export def tokens [] {
    ^credm list-tokens | from json
}
# fetch the credentials for one account
export def get [account:string@entries] {
    ^credm get $account | from json
}
# store the piped-in token response for an account; the ttl passed to
# credm is derived from the response's validUntil timestamp
export def update-token-response [account:string@entries] {
    let token_response = $in
    let expires_in = ($token_response.validUntil | into datetime) - (date now)
    let expires_in_secs = ($expires_in / 1sec | math floor)
    $token_response.token | ^credm set-token $account $"($expires_in_secs)s"
}

15
dev-env.nu Normal file
View File

@ -0,0 +1,15 @@
# environment selector for the internal dev cluster
export-env {
    load-env {
        MS_OCR_AUTHORITY: "http://10.54.150.152:5000",
        DUCKLING_AUTHORITY: "http://duckling.xs.insiders.zz"
        FLAIR_AUTHORITY: "https://flair-xs.dev.insiders.cloud",
        XS_SF_URL: "https://api.internal.insiders.cloud",
        XS_SF_ACCOUNT: "xs.dev.klara",
        XS_URL: "https://backend-xs.dev.insiders.cloud",
        XS_MANAGEMENT_URL: "https://management-xs.dev.insiders.cloud",
        KUBE_CONTEXT: "k8s-xs"
        HELM_KUBECONTEXT: "k8s-xs"
        XS_ENV_NAME: "DEV"
        # NOTE(review): "BOOSTRAP" misspelling is shared by the other *-env.nu files
        XS_KAFKA_BOOSTRAP_SERVER: "localhost:9092"
    }
}

13
dev-tools.nu Normal file
View File

@ -0,0 +1,13 @@
# parses an xml string to maven dependency artifact
# in: output of `from xml` for a <dependency> node
# out: record with one key per child tag (e.g. groupId, artifactId, version)
export def "from maven-dep" [] {
    get content
    | each { |tag| { tag: $tag.tag, value: ($tag.content | get content | first ) } }
    | reduce -f {} {|it, acc| $acc | upsert $it.tag $it.value }
}
# construct a gradle dependency from a dependency artifact
# dep_type: the gradle configuration, e.g. "implementation"
export def "to gradle-dep" [dep_type:string] {
    let dep = $in
    $"($dep_type)\(\"($dep.groupId):($dep.artifactId):($dep.version)\"\)"
}

57
dk.nu Normal file
View File

@ -0,0 +1,57 @@
# NOTE(review): appears unused/unfinished — it maps the parsed time unit to
# an abbreviation but discards the parsed value; confirm before relying on it
def parse-time [] {
    parse "{value} {unit} ago" | each {|x|
        match $x.unit {
            "hours" => "hr",
            "days" => "d"
        }
    }
}
# list images. The result of this can be piped into the most other commands for dk
export def images [] {
    ^docker images --format "{{json . }}"
    | from json --objects
    | select CreatedSince Repository ID Size Tag
    | each {|row|
        # split "host/rest-of-repo" at most once
        let splitted_repo = ($row.Repository | split row '/' -n 2)
        {
            created-since: $row.CreatedSince,
            host: ($splitted_repo | first),
            tag: ($splitted_repo | last),
            version: $row.Tag,
            size: ($row.Size | into filesize)
            id: $row.ID
        }
    }
}
# like "tag" but based on image specs
# call: closure receiving the old image spec and returning the new one
export def "batch tag" [call] {
    each {|image_spec|
        let old = $image_spec
        let new = (do $call $image_spec)
        {old: $old, new: $new}
    } | each {|x|
        tag ($x.old | to docker path) ($x.new | to docker path)
        $x.new
    }
}
# "rename" a docker image
export def tag [old:string, new:string] {
    ^docker tag $old $new
}
# push every piped-in image spec to its registry
export def "push" [] {
    each {|image_spec|
        ^docker push ($image_spec | to docker path)
    }
}
# == implementation ==
# render an image spec record back to "host/tag:version"
def "to docker path" [] {
    let image_spec = ($in)
    $"($image_spec.host)/($image_spec.tag):($image_spec.version)"
}

4
docker-repo.nu Normal file
View File

@ -0,0 +1,4 @@
# list all images
# NOTE(review): relies on the external `dkreg` tool being on PATH
export def list [] {
    dkreg xtractionstudio-docker.insiders-technologies.de tree | lines | parse "{host}/{tag}:{version}"
}

18
duckling.nu Normal file
View File

@ -0,0 +1,18 @@
use ~/bin/nu_scripts/ht.nu
# locales accepted by the duckling service
export def supported-languages [] {
    [
        "de_DE",
        "en_GB",
        "en_US",
        "en_ES",
        "fr_FR",
        "it_IT",
        "ja_JA",
    ]
}
# parse the piped-in text with duckling (time, email and url dimensions)
export def main [--language(-l):string@supported-languages="de_DE"] {
    let text = $in
    ^xh POST (ht with-path $env.DUCKLING_AUTHORITY "parse") --form $"locale=($language)" 'dims=["time", "email", "url"]' $"text=($text)" | from json
}

5
env-paul.nu Normal file
View File

@ -0,0 +1,5 @@
# override the smart-cloud account for paul
export-env {
    load-env {
        XS_SF_ACCOUNT: "xs.dev.paul",
    }
}

34
filestore.nu Normal file
View File

@ -0,0 +1,34 @@
use std log
# upload documents to filestore and returns the location
export def put [glob:string,
    --product:string="xs",
    --tenantId:string="0000016",
] {
    expand-glob $glob
    | par-each { |x|
        # each file gets its own random container id
        let location = (put-single $x.name $x.type (random uuid) $product $tenantId)
        {
            file: $x.name,
            location: $location
        }
    } | flatten
}
# upload a single file; returns the filestore response
def put-single [
    document:path,
    content_type:string,
    container_id:string,
    product:string,
    tenantId:string,
] {
    log debug $"Uploading file ($document) of type ($content_type)"
    let filename = ($document | path basename)
    http post -H ["Content-Type", $content_type] $"http://localhost:8300/api/v1/($product)/($tenantId)/($container_id)/($filename)" $document
}
# resolve a glob to file records including the mime type
def expand-glob [glob:string] {
    glob -D $glob
    | each {|x| ls --mime-type $x | first}
}

14
flair.nu Normal file
View File

@ -0,0 +1,14 @@
# tag each piped-in text with the flair service; returns the flattened tokens
export def main [--authority:string] {
    let input = $in
    # honor the --authority flag; the original declared it but always
    # used $env.FLAIR_AUTHORITY, silently ignoring the caller's choice
    let host = if $authority == null { $env.FLAIR_AUTHORITY } else { $authority }
    $input | each {|text|
        # a trailing '.' is appended to each text before tagging
        http post -t application/json $"($host)/tags?limit=1" { text: $"($text)." }
    } | flatten | get tokens
}
# run flair over a list of ocr file paths
# file_to_text_fn: closure turning an ocr path into the text to tag
export def batch [file_to_text_fn] {
    each {|ocr_path|
        print $"flair processing of ($ocr_path | path basename)"
        {filename: $ocr_path} | insert flair (($ocr_path | do $file_to_text_fn) | main --authority $env.FLAIR_AUTHORITY)
    }
}

30
fs.nu Normal file
View File

@ -0,0 +1,30 @@
# batch rename a list<string> as path
# renamer: closure over the parsed paths returning the new paths
export def rename [renamer,--dry-run] {
    let paths = ($in)
    let new_paths = ($paths | each { | old_path | $old_path | path parse }
    | do $renamer)
    # pair every old path with its replacement
    let plan = $paths | zip $new_paths
    if ($dry_run) {
        return $plan
    }
    $plan | batch rename
}
# in: list<list<string>> and rename first to second for every list.
export def "batch rename" [] {
    each { | row |
        # make sure the destination directory exists before moving
        mkdir ($row.1 | path parse | get parent)
        mv $row.0 $row.1 -i
    }
}
# set the parent dir of a list<path>
export def "append dir" [new_dir:string] {
    update parent {|p| $p.parent | path join $new_dir }
}
# set the filename of a list<path>
export def "set filename" [new_name:string] {
    each { |p| $p.parent | path join $new_name }
}

5
gdl.nu Normal file
View File

@ -0,0 +1,5 @@
# NOTE(review): appears unused — returns the JVM home reported by java_home
def determine-java-home [] {
    /usr/libexec/java_home | lines | first
}
# run the project's gradle wrapper from the current directory
export alias main = ./gradlew

3
gradle.nu Normal file
View File

@ -0,0 +1,3 @@
# fetch metadata about the current Gradle release from the official service
export def release-info [] {
    let endpoint = "https://services.gradle.org/versions/current"
    http get $endpoint
}

76
health.nu Normal file
View File

@ -0,0 +1,76 @@
use sc.nu
use xs.nu
use bm.nu
use progress.nu
use std assert
# assert that `record` has a non-empty value under `key`
def "assert record contains" [record, key:string, msg:string] {
    assert (($record | get -i $key | is-empty) == false) $msg --error-label {
        start: (metadata $record).span.start,
        end: (metadata $record).span.end,
        text: $"the record does not contains the key ($key)",
    }
}
# boolean negation as a command (so it can be used prefix-style in pipelines)
def "not" [value:bool] {
    # compare against false instead of the original `!($value)`:
    # `!` is not a nushell prefix operator
    $value == false
}
# assert that `list` has at least one element
def "assert is not empty" [list, msg:string] {
    assert (not ($list | is-empty)) $msg --error-label {
        start: (metadata $list).span.start,
        end: (metadata $list).span.end,
        text: $"the list is not empty but it is",
    }
}
# run one named test step; appends a PASS/FAIL record to the accumulator
# table piped in, so steps can be chained and later steps can read the
# previous step's `result`
def test [name: string, call] {
    let accum = $in
    print $"Executing: ($name)..."
    let next = (try {
        # pass the previous step's result (if any) into the closure
        let result = ($accum | do $call ($accum | last | get -i result | default {}))
        print "PASS"
        {
            name: $name,
            status: PASS,
            result: $result,
        }
    } catch { |e|
        # record the failure instead of aborting the whole suite
        # (the original started this branch with `throw $e.raw`; `throw` is
        # not a nushell command, and it made the FAIL record unreachable)
        {
            name: $name,
            status: FAIL,
            error: $e.msg,
        }
    })
    $accum | append $next
}
# smoke test: login, create a service, and watch it reach SUCCESS
export def main [] {
    [{}]
    | test "can login into smart cloud" { |_|
        let response = (sc request-token)
        assert record contains $response "token" "Token response does not contains a token"
    }
    | test "can create a service" { |_|
        # returns the created service record to the next step
        xs service ([(bm find xs-reg), "00001.jpg"] | path join) | last
    }
    | test "service is in pending state" { |new_service|
        print $new_service.externalId
        let services = (xs list-service | where externalId == $new_service.externalId)
        assert is not empty $services $"the new create service ($new_service | select name externalId ) can be listed"
        $new_service
    }
    | test "service will be in success state eventually" { |new_service|
        # poll once a second, give up after 20s
        assert (progress wait-until {
            print $"check status for ($new_service.externalId)"
            let status = (xs list-service | where externalId == $new_service.externalId | last | get -i status | default "")
            print $"Has status ($status)"
            $status == "SUCCESS"
        } --step-time 1sec --overall 20sec
        )
        $new_service
    }
}

76
hl.nu Normal file
View File

@ -0,0 +1,76 @@
# known helm projects (value = release name, description = chart path)
export def helm-projects [] {
    [{
        value: "xs-backend",
        description: "/Volumes/extreme/projects/xtraction-factory/XtractionStudioBackend/src/main/helm/xs-backend"
    }]
}
# show the helm release history of a project
export def hist [project:string@helm-projects] {
    let helm_project_path = (helm-projects | where value == $project | first | get description | parse-helm-path)
    cd $helm_project_path.directory
    helm history ($helm_project_path.name) -o json | from json | update updated { |row| $row.updated | into datetime }
}
# roll back a release (to `revision`, or the previous one when omitted)
export def rollback [project:string@helm-projects, revision?:int] {
    let helm_project_path = (helm-projects | where value == $project | first | get description | parse-helm-path)
    cd $helm_project_path.directory
    if $revision == null {
        helm rollback ($helm_project_path.name)
    } else {
        helm rollback ($helm_project_path.name) $revision
    }
}
# render a file from a helm chart.
export def render [
    project:string@helm-projects, # the project name
    path:string@comp-template-files, # the template file relative to the templates folder
    values:string@comp-values-files, # the value file in addition to the values.yaml which will always be used
] {
    let helm_project_path = ($project | helm-project-directory)
    ^helm template ($helm_project_path) -s ("templates" | path join $path) -f ($helm_project_path | path join $values) --debug
}
# == completion
# template files of the project already typed on the line
export def comp-template-files [context: string] {
    let project_path = ($context | split shell words | filter project-names | first | helm-project-directory)
    let template_path = ([$project_path, "templates"] | path join)
    glob $"($project_path)/templates/*.{yaml,yml}" | each { |template_file|
        $template_file | path relative-to $template_path
    }
}
# values-*.yaml files of the project already typed on the line
def comp-values-files [context: string] {
    let project_path = ($context | split shell words | filter project-names | first | helm-project-directory)
    glob $"($project_path)/values-*.{yaml,yml}" | each { |template_file|
        $template_file | path relative-to $project_path
    }
}
# == implementation
# like split words but split only on spaces (ignoring multiple spaces)
def "split shell words" [] {
    split row " " | filter { |x| ($x | is-empty) == false }
}
# filter words for valid project names
def "filter project-names" [] {
    let words = $in
    let project_names = (helm-projects | each {|x| $x.value})
    $words | filter {|word| $word in $project_names}
}
# split a chart path into {name, directory}
def parse-helm-path [] {
    let helm_path = $in
    {
        name: ($helm_path | path basename),
        directory: ($helm_path | path dirname)
    }
}
# look up the chart directory for a project name
def helm-project-directory [] {
    let project_name = $in
    helm-projects | where value == $project_name | first | get description
}

20
ht.nu Normal file
View File

@ -0,0 +1,20 @@
# GET a url, optionally with a bearer token
export def get [url:string, --bearer:string] {
    # the original tested `if bearer == nothing` — two barewords that compare
    # as strings and are never equal — so the unauthenticated branch was
    # unreachable; test the flag variable against null instead
    if $bearer == null {
        xh $url
    } else {
        print $url
        http get -fe -H ["Authorization", $"bearer ($bearer)"] $url
    }
}
# POST without authentication
export def post [url:string] {
    xh POST $url
}
# PUT with bearer authentication
export def put [url:string, --bearer:string] {
    xh PUT -A bearer -a $"($bearer)" $url
}
# replace the path of `authority` with `path`, dropping a trailing '?'
export def with-path [authority:string, path:string] {
    $authority | url parse | update path $path | url join | str trim -c '?'
}

11
img.nu Normal file
View File

@ -0,0 +1,11 @@
# completion: pdf files in the current directory (as relative paths)
def comp-to-png [] {
    glob *.pdf | each { |x| $x | path relative-to $"(pwd)" }
}
# convert (a pdf) to png
# returns the list of created png paths
export def "to png" [pattern:string@comp-to-png] {
    glob $pattern | each {|pdf|
        let dest = ($pdf | path parse | update extension png | path join)
        ^convert -density 300 $pdf $dest
        $dest
    }
}

11
it.nu Normal file
View File

@ -0,0 +1,11 @@
# zip two lists producing a table where the 'key' list are the keys.
export def "zip-keys" [ # -> list<any>
    keys: list<any>
] {
    zip $keys | reduce -f {} { |it, acc| $acc | upsert $it.1 $it.0 }
}
# split a table into equally sized chunks
# (non-overlapping windows: stride equals the chunk size)
export def chunk [chunk_size:int] {
    window $chunk_size --stride $chunk_size -r
}

51
job.nu Normal file
View File

@ -0,0 +1,51 @@
# spawn task to run in the background
#
# please note that it spawns a fresh nushell to execute the given command,
# so it doesn't inherit the current scope's variables, custom commands, or
# alias definitions — only env variables whose value can convert to string.
#
# e.g:
# spawn { echo 3 }
export def spawn [
    command: block, # the command to spawn
] {
    let config_path = $nu.config-path
    let env_path = $nu.env-path
    # re-create the closure body as source code for the child shell
    let source_code = (view source $command | str trim -l -c '{' | str trim -r -c '}')
    let job_id = (pueue add -p $"nu --config \"($config_path)\" --env-config \"($env_path)\" -c '($source_code)'")
    {"job_id": $job_id}
}
# show the captured output of a job
export def log [
    id: int # id to fetch log
] {
    pueue log $id -f --json
    | from json
    | transpose -i info
    | flatten --all
    | flatten --all
    | flatten status
}
# get job running status
export def status () {
    pueue status --json
    | from json
    | get tasks
    | transpose -i status
    | flatten
    | flatten status
}
# kill specific job
export def kill (id: int) {
    pueue kill $id
}
# clean job log
export def clean () {
    pueue clean
}
# attach to the output of the running job
export def follow [] {
    pueue follow
}

11
jwt.nu Normal file
View File

@ -0,0 +1,11 @@
use ~/bin/nu_scripts/it.nu
# parses a signed jwt token
# in: the raw "header.payload.signature" string
# out: record with decoded header and payload; signature stays base64
export def parse [] {
    let splitted = (split row '.')
    {
        header: ($splitted | get 0 | decode base64 -c url-safe-no-padding | from json),
        payload: ($splitted | get 1 | decode base64 -c url-safe-no-padding | from json),
        signature: ($splitted | get 2)
    }
}

3
k9s.nu Normal file
View File

@ -0,0 +1,3 @@
# launch k9s against the context selected by the current *-env module
export def main [] {
    run-external "k9s" "--context" $env.KUBE_CONTEXT
}

23
kafka.nu Normal file
View File

@ -0,0 +1,23 @@
# run the external kafka-admin-cli helper, capturing stdout
def call_kafka [args] {
    run-external --redirect-stdout kafka-admin-cli $args
}
# list all kafka topics on your broker
export def topics [] {
    call_kafka list-topics | lines | sort
}
# delete topics. Reads the topic names from plain stdin
export def "delete topics" [] {
    $in | to text | call_kafka delete-topics
}
# describe topics. Reads the topic names from plain stdin
export def "describe topics" [] {
    $in | to text | call_kafka describe-topics
}
# get the info for a list of topics. Reads the topic names from plain stdin
export def "topic info" [] {
    $in | to text | call_kafka topics-info | from json
}

110
kube.nu Normal file
View File

@ -0,0 +1,110 @@
# raw pod objects from the current KUBE_CONTEXT
export def list-pods [] {
    kubectl --context $env.KUBE_CONTEXT get pods --output json | from json | get items
}
# pod names; with -r only pods with a ready container
export def pods [--running(-r)] {
    if $running {
        containers | where ready | select pod | uniq
    } else {
        containers | get pod | uniq
    }
}
# apply the piped-in manifest
export def apply [] {
    kubectl --context $env.KUBE_CONTEXT apply -f -
}
# one row per container, enriched with pod metadata
export def containers [] {
    list-pods | each {|item|
        $item.status.containerStatuses | flatten | select image name restartCount ready
        | insert pod ($item.metadata.name)
        | insert creation ($item.metadata.creationTimestamp)
        | insert nodeName ($item.spec.nodeName)
    } | flatten
}
# all distinct images currently running
def list-images [] {
    containers | get image | uniq
}
# point a deployment's container at a different image
export def set-image [deployment:string@apps, image:string@list-images] {
    kubectl --context $env.KUBE_CONTEXT set image $"deployment/($deployment)" $"($deployment)=($image)"
}
# delete all pods of an app (the deployment recreates them)
export def "del app-pods" [app:string@apps] {
    kubectl --context $env.KUBE_CONTEXT delete pods -l $"app=($app)"
}
# list all app names
export def apps [] {
    containers | get name | uniq
}
# list all available deployments
export def deployments [] {
    call_kube ["get", "deployments"] | lines | parse "{name} {rest}" | skip | get name
}
# apply `and_block` to the stream only when `condition` holds
def when [condition:bool, and_block] {
    let stream = $in
    if ($condition) {
        $stream | do $and_block
    } else {
        $stream
    }
}
# run kubectl against the current KUBE_CONTEXT, capturing stdout
def call_kube [args,--quiet] {
    run-external --redirect-stdout "kubectl" "--context" $env.KUBE_CONTEXT $args
}
# TODO(review): stub — builds an arg list but never calls kubectl
export def "pod logs" [...pods:string@pods] {
    let args = [logs]
}
# rolling-restart all pods of a deployment
export def "deployment restart" [deployment:string@deployments] {
    call_kube [rollout, restart, deployment, $deployment]
}
# watch the rollout status of a deployment
export def "deployment restart status" [deployment:string@deployments] {
    # use `rollout status`; the original copy-pasted `rollout restart`,
    # making this command trigger a second restart instead of reporting one
    call_kube [rollout, status, deployment, $deployment]
}
# show the logs of all containers with a certain app label
export def "app logs" [
    ...app:string@apps, # the name of app
    --json (-j), # if set parse the log as json
    --since:duration # how old the logs should be
    --timestamps (-t),
    --previous (-p),
    --prefix,
] {
    let args = [logs]
    # label selector matching any of the given apps
    let args = ($args | append $"-l app in \(($app | str join ',')\)")
    let args = if $since != null { $args | append $"--since=($since / 1min)m"} else { $args }
    let args = if $timestamps { $args | append "--timestamps" } else { $args }
    let args = if $previous { $args | append "--previous" } else { $args }
    let args = if $prefix { $args | append "--prefix" } else { $args }
    call_kube $args
    | when $json { from json --objects }
}
# List the logs for a deployment
export def "deployment logs" [
    deployment:string@deployments
    --json (-j), # if set parse the log as json
    --since:duration # how old the logs should be
    --timestamps (-t),
    --previous (-p),
    --prefix,
] {
    let args = [logs]
    let args = ($args | append $"deployment/($deployment)")
    let args = if $since != null { $args | append $"--since=($since / 1min)m"} else { $args }
    let args = if $timestamps { $args | append "--timestamps" } else { $args }
    let args = if $previous { $args | append "--previous" } else { $args }
    let args = if $prefix { $args | append "--prefix" } else { $args }
    call_kube $args
    # only lines that look like json objects are parsed
    | when $json { lines | filter { |line| $line | str starts-with '{' } | each { |line| $line | from json } }
}

11
local-env.nu Normal file
View File

@ -0,0 +1,11 @@
# local development environment: layer local endpoints over dev-env
export use ~/bin/nu_scripts/dev-env.nu
export-env {
    load-env {
        XS_URL: "http://localhost:8080",
        FLAIR_AUTHORITY: "http://lyssa:8082",
        XS_MANAGEMENT_URL: "http://localhost:8081",
        XS_ENV_NAME: "LOCAL"
        # NOTE(review): "BOOSTRAP" misspelling is shared by the other *-env.nu files
        XS_KAFKA_BOOSTRAP_SERVER: "lyssa:9092"
    }
}

8
lyssa-env.nu Normal file
View File

@ -0,0 +1,8 @@
# dev-env with flair pointed at the lyssa host
export use ~/bin/nu_scripts/dev-env.nu
export-env {
    load-env {
        FLAIR_AUTHORITY: "http://lyssa:8082",
    }
}

7
maven-dev.xml Normal file
View File

@ -0,0 +1,7 @@
<!-- sample Maven dependencies fragment — presumably input for the
     "from maven-dep" parser in dev-tools.nu; confirm usage -->
<dependencies>
    <dependency>
        <groupId>org.apache-extras.beanshell</groupId>
        <artifactId>bsh</artifactId>
        <version>2.0b6</version>
    </dependency>
</dependencies>

7
mitm.nu Normal file
View File

@ -0,0 +1,7 @@
# reverse-proxy the smart-cloud API through mitmproxy on port 7070
export def "forward cloud" [] {
    mitmproxy -p 7070 --mode $"reverse:($env.XS_SF_URL)"
}
# reverse-proxy the backend through mitmproxy on port 7071
export def "forward backend" [] {
    mitmproxy -p 7071 --mode $"reverse:($env.XS_URL)"
}

19
ms-form.nu Normal file
View File

@ -0,0 +1,19 @@
use std assert
# completion: known ms form-recognizer endpoints
export def comp-authority [] {
    [
        { value: "http://10.54.150.152:5001", description: "internal ms ocr form recognizer" }
    ]
}
# run the ms form-recognizer prebuilt-read model on a file
export def main [file:path, --authority:string@comp-authority] {
    # resolve the authority: explicit flag wins, otherwise fall back to the
    # environment (the original compared against `$nothing`, which is not a
    # nushell variable and errors at runtime)
    let authority = if $authority == null {
        # the message now names the actual flag; the original said --host-name
        assert ("MS_OCR_AUTHORITY" in $env) $"no authority specified. Use either an environment or specify --authority"
        $env.MS_OCR_AUTHORITY
    } else {
        $authority
    }
    http post -H [content-type, application/octet-stream] $"($authority)/formrecognizer/documentModels/prebuilt-read:syncAnalyze?api-version=2022-08-31" (open $file)
}

58
ms-ocr.nu Normal file
View File

@ -0,0 +1,58 @@
use std assert
# completion: known ms ocr endpoints
def comp-authority [] {
    [
        { value: "http://10.54.150.152:5000", description: "internal ms ocr"},
    ]
}
# run the ms vision read OCR on a pdf/image file
export def main [pdf:path, --authority:string@comp-authority] {
    # resolve the authority: explicit flag wins, otherwise fall back to the
    # environment (the original compared against `$nothing`, which is not a
    # nushell variable and errors at runtime)
    let authority = if $authority == null {
        # the message now names the actual flag; the original said --host-name
        assert ("MS_OCR_AUTHORITY" in $env) $"no authority specified. Use either an environment or specify --authority"
        $env.MS_OCR_AUTHORITY
    } else {
        $authority
    }
    http post -H [content-type, application/octet-stream] $"($authority)/vision/v3.2/read/syncAnalyze?language=de&readingOrder=natural" (open $pdf)
}
# list<str> => [{filename: str, ocr: table}]
# non-existing paths are silently skipped
export def batch [] {
    each { |image_path|
        print $"ms ocr processing of (ansi gb)($image_path | path basename)(ansi reset)"
        if ($image_path | path exists) {
            let result = (main $image_path)
            {filename: $image_path, ocr: $result}
        }
    }
}
# get the pages from ocr
export def "slice pages" [...page_numbers:int] {
    let ocr = ($in)
    let slice = ($ocr | get analyzeResult.readResults | enumerate
    | where { |x| $x.index in $page_numbers }
    | get item
    | wrap readResults
    )
    $ocr | update analyzeResult $slice
}
# ocr every pdf/image below `directory`, caching results as json files
export def create-ocr-cache [directory:string=""] {
    mkdir ocr_cache
    glob $"([$directory, *{pdf,png,jpg,jpeg,tif,tiff}] | path join)" | batch | each { |x| $x.ocr | save --force $"ocr_cache/($x.filename | path basename)-ms-ocr.json" }
}
# flatten an ocr result into its recognized words
export def "to-words" [] {
    get analyzeResult.readResults.lines | flatten | get words | flatten | get text
}
# flatten an ocr result into its text lines
export def "to-lines" [] {
    get analyzeResult.readResults.lines | flatten | get text
}
# the whole recognized text, lines joined with '. '
export def "to-text" [] {
    to-lines | str join '. '
}

2
net.nu Normal file
View File

@ -0,0 +1,2 @@
# TODO(review): stub — arguments are accepted but nothing is implemented yet
export def services [host:string, service:string] {}

2
nxs/mod.nu Normal file
View File

@ -0,0 +1,2 @@
export module settings.nu
export module traces.nu

6
nxs/regression.nu Normal file
View File

@ -0,0 +1,6 @@
use remote.nu
# run the regression suite
# NOTE(review): `remote sync` does not match any export in remote.nu, which
# defines "sync code" taking host/project arguments — confirm intended call
export def run [] {
    print "test"
    remote sync
}

9
nxs/remote.nu Normal file
View File

@ -0,0 +1,9 @@
use settings.nu
use settings.nu [comp-host-names, comp-project-names]
# sync code to remote machine
# NOTE(review): the rsync body is still commented out — this is a stub
export def "sync code" [host:string@comp-host-names, project:string@comp-project-names] {
    # print $"sync with ($environment.build_host)"
    # run-external "rsync" "-avPq" "--delete" "--filter=:- .gitignore" "--exclude" .git $"(project-root)/" $"($environment.build_host):(build-host-path)"
}

63
nxs/settings.nu Normal file
View File

@ -0,0 +1,63 @@
# path of the sqlite settings db
def settings-db [] {
    $env.HOME | path join ".nxs.db"
}
# append the piped-in records to the given table
def save [table_name:string] {
    into sqlite (settings-db) --table_name $table_name
}
# run (and echo) a query against the settings db
def query-settings [query:string] {
    print $query
    open (settings-db) | query db $query
}
# register a build host, replacing any previous entry with the same name
export def add-host [host:string, working_dir:string] {
    query-settings $"DELETE FROM hosts WHERE host is '($host)'"
    [{host:$host, working_dir: $working_dir}] | save "hosts"
}
# look up a registered host
export def get-host [host:string] {
    query-settings $"SELECT * FROM hosts WHERE host = '($host)'"
}
# add a path-valued setting to a section
export def add-path [section:string, key:string, value:path] {
    # store the key argument; the original wrote `{key:string, ...}`, which
    # saved the literal string "string" as the key (compare the correct
    # `{key:$key, ...}` used by set/set-path below)
    [{key:$key, value:$value}] | save $section
}
# add a string-valued setting to a section
export def add [section:string, key:string, value?:string] {
    # same fix as add-path: persist $key, not the bareword `string`
    [{key:$key, value:$value}] | save $section
}
# replace a path-valued setting
export def set-path [section:string, key:string, value:path] {
    del $section $key
    [{key:$key, value:$value}] | save $section
}
# replace a string-valued setting
export def set [section:string, key:string, value?:string] {
    del $section $key
    [{key:$key, value:$value}] | save $section
}
# delete a setting
export def del [section:string, key:string] {
    query-settings $"DELETE FROM ($section) WHERE key is '($key)'"
}
# dump a whole section
export def get [section:string@"comp-section-names"] {
    query-settings $"SELECT * FROM ($section)"
}
# completion: section (table) names
export def "comp-section-names" [] {
    open (settings-db) | columns
}
# completion: project names with descriptions
export def "comp-project-names" [] {
    get "projects" | select key value | rename value description
}
# completion: registered host rows
export def "comp-host-names" [] {
    open (settings-db) | get hosts
}
# print settings
export def main [] {
    open (settings-db)
}

13
nxs/traces.nu Normal file
View File

@ -0,0 +1,13 @@
# list the last `limit` traces, keeping only traces with more than 100 spans
export def main [limit:int=1000] {
    http get $"http://lyssa:9411/api/v2/traces?limit=($limit)"
    | filter noice traces
}
# TODO(review): stub — not implemented yet
export def "tags of" [span:string] {
}
# keep only traces with more than 100 spans
def "filter noice traces" [] {
    where {|t| ($t | length) > 100}
}

33
progress.nu Normal file
View File

@ -0,0 +1,33 @@
# sleep for `wait_time`, printing a per-second countdown to stderr
export def wait-for [wait_time:duration] {
    let steps = $wait_time / 1sec
    seq 0 $steps | each { |i|
        print -n -e $"(ansi erase_entire_line)\r($wait_time - ($i * 1sec))"
        sleep 1sec
    }
    print ""
}
# poll `call` until it returns true (-> true) or `--overall` elapses (-> false)
export def wait-until [call, --step-time:duration, --overall:duration] {
    let input_stream = $in
    let end_time = (date now) + $overall
    loop {
        if (do $call $input_stream) {
            return true
        }
        if ($end_time < (date now)) {
            return false
        }
        wait-for $step_time
    }
}
# like each, but prints an index/total progress line per element
export def "progressed-each" [call] {
    let stream = ($in)
    let overall = ($stream | length)
    $stream | enumerate | each { |x|
        print -n -e $"(ansi erase_entire_line)\r($x.index)/($overall)"
        do $call $x.item
    }
}

11
properties.nu Normal file
View File

@ -0,0 +1,11 @@
# parse java-style properties text into {key, value} rows
export def "from properties" [] {
    lines
    | parse '{key}={value}'
}
# the value stored under `key` (empty text when absent)
export def get-value [key:string] {
    lines
    | parse '{key}={value}'
    | where key == $key
    | get -i value
    | to text
}

46
regression.nu Normal file
View File

@ -0,0 +1,46 @@
use s3.nu
# fetch the newest daily report csv below the given xs-reg-result subfolder
# (shared implementation for latest-daily and latest-daily-detail, which
# were byte-identical apart from the subfolder name)
def latest-daily-report [subdir:string] {
    let latest = (s3 files xs-dev $"xs-reg-result/($subdir)" | sort-by lastModified | reverse | where key =~ "daily" | first)
    [
        "xs-dev",
        ($latest.url | url parse | get path | str trim -c '/'),
        $latest.key
    ] | path join | s3 get-csv
}
# the newest daily basic report
export def latest-daily [] {
    latest-daily-report "basic"
}
# the newest daily detail report
export def latest-daily-detail [] {
    latest-daily-report "detail"
}
# flatten the proposedFields structure (pipeline input) into a name/type/value table
export def prop-fields [] {
get proposedFields.fields | flatten | flatten | select name type value
}
# list the `limit` files with the most errors of the given categories for a field type
# NOTE(review): `errors` is inserted as the *list* of matching Count values,
# not their sum — sort-by on a list column may not rank as intended; also
# `default 0` only replaces null, not an empty list. Confirm whether
# `math sum` was meant here.
export def "detail worst" [type:string,--limit:int=3,--error-category=["FP", "FN"]] {
where Fieldtype =~ $type | group-by Filename | transpose filename data
| each { |f| $f | insert errors ($f.data | where Classification in $error_category | get Count | default 0 ) }
| sort-by errors | reverse | take $limit
}
# element-wise difference of two equal-length numeric lists
# (zip truncates to the shorter list)
export def delta [list1, list2] {
    $list1 | zip $list2 | each {|pair|
        ($pair | first) - ($pair | last)
    }
}
# compares a report.csv (pipeline input) with the latest daily baseline,
# appending an "F1 Score Delta" column (report minus baseline, row-wise)
export def "compare daily" [] {
let report = ($in)
let f1_report = $report | get "F1 Score"
let f1_latest = (latest-daily | get "F1 Score")
let f1_delta = (delta $f1_report $f1_latest)
let f1_delta_row = ($f1_delta | wrap "F1 Score Delta")
$report | merge $f1_delta_row
}

48
s3.nu Normal file
View File

@ -0,0 +1,48 @@
# host aliases configured in the minio client config (used for completion)
def mc-hosts [] {
open ~/.mc/config.json | get aliases | columns
}
# list the bucket names on the given mc host
export def buckets [host:string@mc-hosts] {
^mc ls --json $host | from json --objects | get key
}
# force-remove a bucket including all of its content
export def "del bucket" [host:string@mc-hosts, bucket:string] {
^mc rb --force $"($host)/($bucket)"
}
# recursively copy a bucket into the current directory
export def "download bucket" [host:string@mc-hosts, bucket:string] {
^mc cp --recursive ([$host, $bucket ] | path join) .
}
# delete all files within a bucket (but not the bucket itself)
# fixed: the parameter was declared `bucket:string:string`, which is not a
# valid nushell type annotation and broke parsing of the signature
export def "empty bucket" [host:string@mc-hosts, bucket:string] {
    ^mc rm --recursive --force ([$host, $bucket] | path join)
}
# create a new bucket on the given host
export def "mk bucket" [host:string@mc-hosts, bucket:string] {
^mc mb $"($host)/($bucket)"
}
# recursively list objects under host/prefix; host and prefix are added to
# every row so downstream commands (`delete files`, `cat files`) can rebuild paths
export def files [host:string@mc-hosts,prefix:string=""] {
^mc ls -r --json ([$host, $prefix] | path join) | from json --objects
| insert host $host
| insert prefix $prefix
}
# list the top-level entries of a host (non-recursive)
export def folders [host:string@mc-hosts] {
^mc ls --json $host | from json --objects
}
# delete the objects described by the pipeline input (rows from `files`)
export def "delete files" [] {
let files = ($in | each { |x| [$x.host, $x.prefix, $x.key] | path join })
run-external --redirect-stdout mc rm $files
}
# print the content of the objects described by the pipeline input (rows from `files`)
export def "cat files" [] {
let files = ($in | each { |x| [$x.host, $x.prefix, $x.key] | path join })
run-external --redirect-stdout mc cat $files
}
# download the object at the piped-in path and parse it as csv
export def get-csv [] {
^mc cat $in | from csv
}

132
sc.nu Normal file
View File

@ -0,0 +1,132 @@
use ~/bin/nu_scripts/ht.nu
use ~/bin/nu_scripts/jwt.nu
use ~/bin/nu_scripts/credm.nu
# credential-manager entry name under which the access token is stored
def token-name [] {
    [$env.XS_SF_ACCOUNT, "token"] | str join "."
}
# authenticate against smart cloud using credentials from the credential
# manager; returns the parsed token response
export def request-token [] {
credm get $env.XS_SF_ACCOUNT
| select login password
| rename username password
| to json
| ht post (ht with-path $env.XS_SF_URL "/1/rest/accounts/authentication/requesttoken")
| from json
}
# current date as yyyymmdd (used as key-vault path segment)
def today [] {
date now | format date "%Y%m%d"
}
# fetch today's key-vault key for the tenant encoded in a fresh access token
export def request-tenant-key [] {
let token = (request-token)
let tenant_id = ($token | get token | jwt parse | get payload.tenantId)
let url = (ht with-path $env.XS_SF_URL $"/1/rest/security/keyvault/key/($tenant_id)/(today)")
ht get $url --bearer $token.token
}
# authenticated GET against the smart cloud api, parsed as json
def sc-get [path:string] {
^xh get -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
}
# authenticated POST against the smart cloud api, parsed as json
def sc-post [path:string] {
^xh post -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
}
# authenticated PUT against the smart cloud api, parsed as json
def sc-put [path:string] {
^xh put -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
}
# authenticated DELETE against the smart cloud api, parsed as json
def sc-delete [path:string] {
^xh delete -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
}
# re-login and store the fresh token via credm; returns the raw token string.
# raises when the token response carries an ErrorCode.
# NOTE(review): `def-env` is deprecated in newer nushell (use `def --env`)
export def-env update-access-token [] {
print -e $"Login as (ansi pb)($env.XS_SF_ACCOUNT)(ansi reset)"
let token_response = (request-token)
if "ErrorCode" in $token_response {
print "ERROR"
let error_msg = $"Could not login into smart cloud as user ($env.XS_SF_ACCOUNT): ($token_response | get Message)"
error make {
msg: $error_msg
}
}
$token_response | credm update-token-response (token-name)
$token_response.token
}
# return the cached access token, logging in again when none is stored.
# NOTE(review): there is no expiry check — a stored but expired token is
# returned as-is.
export def provide-access-token [] {
    let token = (credm get (token-name))
    # `not-in` replaces `not "password" in $token`, which is ambiguous to
    # parse (unary `not` vs. the `in` operator) and a known nushell gotcha
    if "password" not-in $token {
        update-access-token
    } else {
        $token.password
    }
}
# list the subsystems known to the smart cloud tenant
export def list-subsystems [] {
sc-get "/1/rest/subsystems/management/" | get subsystemInfos
}
# upload a subsystem archive via multipart PUT (metadata plus file content)
export def upload-sub [name:string@xs-subsystem-names, sub_path:string] {
^xh put --form -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $"/1/rest/subsystems/management/($name)") 'subsystem-meta-data={ "description" : "Eine tolle Beschreibung" }' $"subsystem-data@($sub_path)"
| from json
}
# completion helper: hard-coded list of known subsystem names
export def xs-subsystem-names [] {
[
"XS-Service-Test",
"XS-Service-Prod",
]
}
# completion helper: master-data tables supported by the transaction commands
def xs-matching-db-table-names [] {
[
"XS_SETTINGS"
]
}
# open a master-data transaction for `table` of `subsystem`;
# returns the transaction descriptor (including transactionId)
export def start-transaction [
subsystem:string@xs-subsystem-names,
table:string@xs-matching-db-table-names,
columns:list # column names the upcoming rows will provide
--clear # when set, the table is cleared before the new rows are committed
] {
{
subsystem: $subsystem,
tableMetaData: [{
tableName: $table,
clear: $clear,
columns: $columns
}]
} | to json
| sc-post /1/rest/subsystems/transactions/
}
# build the XS_SETTINGS key/value record for the current tenant from
# the environment urls and the stored credentials
export def tenant-settings [] {
let creds = (credm get $env.XS_SF_ACCOUNT)
{
"XS_BACKEND_URL": (ht with-path $env.XS_URL "/"),
"SF_URL": (ht with-path $env.XS_SF_URL "/"),
"TENANT_USER": $creds.login,
"TENANT_PASSWORD": $creds.password
}
}
# commit a master-data transaction
# NOTE(review): commit is issued via HTTP DELETE — confirm against the api spec
export def commit-transaction [transaction_id:string] {
sc-delete $"/1/rest/subsystems/transactions/($transaction_id)/commit"
}
# number of active rows in the XS_SETTINGS master-data table of a subsystem
export def settings-db-row-count [name:string@xs-subsystem-names] {
sc-get $"/1/rest/subsystems/masterdata/count/($name)/XS_SETTINGS/Active"
}
# replace the XS_SETTINGS master data of a subsystem with the current
# tenant settings: open a clearing transaction, upload the rows, commit
export def update-tenant-settings [name:string@xs-subsystem-names] {
print -e $"Update settings for tenant ($env.XS_SF_ACCOUNT) and subsystem ($name)"
let transaction_id = (start-transaction $name XS_SETTINGS ["SETTINGS_KEY", "SETTINGS_VALUE"] --clear | get transactionId)
print -e $"transaction id: (ansi pb)($transaction_id)(ansi reset)"
tenant-settings | transpose | rename key value | each { |x| [$x.key,$x.value] } | to json
| sc-put $"/1/rest/subsystems/transactions/($transaction_id)/XS_SETTINGS"
commit-transaction $transaction_id
}

View File

@ -0,0 +1,7 @@
use ~/bin/nu_scripts/aws-test-env.nu
export-env {
# override the aws-test environment for the SE admin account
load-env {
XS_ENV_NAME: "AWS_TEST(SE-ADMIN)"
XS_SF_ACCOUNT: "insiders-admin@se.insiders.cloud",
}
}

7
user-se-api-aws-test.nu Normal file
View File

@ -0,0 +1,7 @@
use ~/bin/nu_scripts/aws-test-env.nu
export-env {
# override the aws-test environment for the SE api user
load-env {
XS_ENV_NAME: "AWS_TEST(SE)"
XS_SF_ACCOUNT: "xs.aws.test.se.insiders-api",
}
}

163
xs-deploy.nu Normal file
View File

@ -0,0 +1,163 @@
use ~/bin/nu_scripts/bm.nu
use ~/bin/nu_scripts/hl.nu
use ~/bin/nu_scripts/kube.nu
use std log
# == Environment ==
# root of the XtractionStudioBackend checkout (resolved via bookmarks)
def project-root [] { bm find XtractionStudioBackend }
# helm release name
def helm-project-name [] { "xs-backend" }
# remote working directory used on the build host
def build-host-path [] { "workingdirs/xtractionstudio" }
# mapping of gradle app names to k8s deployments; `target` (when present)
# overrides the default gradle push task (see find-app-target)
export def component-definitions [] {
[
{app: demo, deployment: xs-demo, target: "pushDemoImage"},
{app: logging-proxy, deployment: xs-logging-proxy, target: "pushLoggingProxyImage"},
{app: backend, deployment: xs-backend},
{app: ducklingClient, deployment: xs-duckling},
{app: flairClient, deployment: xs-flair},
{app: imageRenderer, deployment: xs-image-renderer},
{app: languageExtractor, deployment: xs-language-extractor},
{app: ocrService, deployment: xs-ocr-service},
{app: addressExtractor, deployment: xs-address-extractor},
{app: steward, deployment: xs-steward},
{app: statisticsService, deployment: xs-statistics-creator},
{app: bankExtractor, deployment: xs-bank-extractor},
{app: scriptExtractor, deployment: xs-script-extractor},
]
}
# completion helper: all known app names
def comp-app-name [] {
apps
}
# names of all deployable apps
def apps [] {
component-definitions | get app
}
# gradle push task for an app; falls back to apps:<name>:dockerPushImage
def find-app-target [app_name:string] {
component-definitions | where app == $app_name | first | get -i target | default $"apps:($app_name):dockerPushImage"
}
# deployment targets with their docker repo, helm gradle-target suffix,
# aws-login requirement and build host
def environments [] {
[
{
name: "dev",
docker_repo: "xtractionstudio-docker.insiders-technologies.de",
helm_target: "Development",
needs_aws_login: false,
build_host: "lyssa",
},
{
name: "aws-test",
docker_repo: "047349208615.dkr.ecr.eu-central-1.amazonaws.com",
helm_target: "Awstest",
needs_aws_login: true,
build_host: "gitlab-build-host",
},
{
name: "aws-prod",
docker_repo: "047349208615.dkr.ecr.eu-central-1.amazonaws.com",
helm_target: "Awsprod",
needs_aws_login: true,
build_host: "gitlab-build-host",
},
]
}
# look up an environment record by its name (pipeline input)
def find-environment [] {
    let wanted = $in
    environments | where name == $wanted | first
}
# == Completitions ==
# completion helper: names of all deployment environments
def comp-environment-name [] {
environments | get name
}
# == Externals ==
# run the project's local gradle wrapper with the given arguments
def call_gradle [args] {
let gradle_wrapper = ([(project-root), "gradlew"] | path join)
log debug $"use gradle_wrapper location: ($gradle_wrapper)"
run-external $gradle_wrapper $args
}
# rsync the working copy to the environment's build host (honours .gitignore,
# excludes .git, deletes files removed locally)
def sync [environment] {
print $"sync with ($environment.build_host)"
run-external "rsync" "-avP" "--delete" "--filter=:- .gitignore" "--exclude" .git $"(project-root)/" $"($environment.build_host):(build-host-path)"
}
# log the remote build host's docker into the environment's aws ecr registry
def remote_aws_login [$environment] {
ssh ($environment.build_host) -t $"aws ecr get-login-password --region eu-central-1 | docker login --username AWS --password-stdin ($environment.docker_repo)"
}
# run gradle on the environment's build host inside the synced working dir
def remote_gradle [environment, args:list<string>] {
print $"execute gradle with args: ($args)"
ssh $environment.build_host -t $"cd (build-host-path) ; EXCLUDE_PUSH_IMAGE=yes ./gradlew ($args | str join ' ')"
}
# build and push images on the environment's build host
# NOTE(review): the `apps` parameter is currently ignored — every call runs
# the all-in-one "pushAllToDev" task (per-app targets are commented out),
# so "update app" rebuilds everything, not just the selected app
def build-on-host [environment, apps:list<string>] {
# let gradle_targets = ($apps | each {|app| find-app-target $app })
execute-gradle-task-on-env $environment "pushAllToDev"
}
# run a single gradle task remotely, logging into ecr first when required
def execute-gradle-task-on-env [environment, task:string] {
let gradle_args = (["--project-prop", $"docker.repo=($environment.docker_repo)", "--parallel"] | append $task)
if ($environment.needs_aws_login) {
remote_aws_login $environment
}
remote_gradle $environment $gradle_args
}
# install the helm chart on the target cluster via the remote gradle task
def helm-deploy [environment] {
print "Install helm app"
remote_gradle $environment [$"helmInstallXsBackendTo($environment.helm_target)"]
}
# build and push the images without changing the k8s chart
# NOTE(review): unlike `full`, this does not sync the sources first
export def "push images" [environment_name:string@comp-environment-name] {
let environment = ($environment_name | find-environment)
cd (project-root)
build-on-host $environment (apps)
}
# install the complete helm app to the chosen environment, building all containers
export def "full" [environment_name:string@comp-environment-name] {
let environment = ($environment_name | find-environment)
cd (project-root)
sync $environment
build-on-host $environment (apps)
helm-deploy $environment
}
# execute a single gradle task on the remote build host (after syncing)
export def "task" [environment_name:string@comp-environment-name, task:string] {
let environment = ($environment_name | find-environment)
cd (project-root)
sync $environment
execute-gradle-task-on-env $environment $task
}
# only install the current helm chart (without building and pushing the images)
export def "helm app" [environment_name:string@comp-environment-name] {
let environment = ($environment_name | find-environment)
cd (project-root)
sync $environment
helm-deploy $environment
}
# update a single deployment i.e. push its docker image and restart the deployment
# NOTE(review): build-on-host currently rebuilds and pushes everything,
# not just `app_name` — only the restart is app-specific
export def "update app" [environment_name:string@comp-environment-name, app_name:string@comp-app-name] {
let environment = ($environment_name | find-environment)
cd (project-root)
sync $environment
build-on-host $environment [$app_name]
let component = (component-definitions | where app == $app_name | first)
print $"restart k8s deployment ($component.deployment)"
kube deployment restart $component.deployment
}
# only sync the sources to the environment's build host (no build, no deploy)
export def "sync code" [environment_name:string@comp-environment-name] {
let environment = ($environment_name | find-environment)
cd (project-root)
sync $environment
}

176
xs.nu Normal file
View File

@ -0,0 +1,176 @@
use ~/bin/nu_scripts/ht.nu
use ~/bin/nu_scripts/credm.nu
use ~/bin/nu_scripts/sc.nu
# spring actuator endpoint index (_links) of the management url
export def actuators [] {
http get (ht with-path $env.XS_MANAGEMENT_URL "/actuator") | from json | get _links
}
# upload script source to the local script extractor
# NOTE(review): `$body` is captured from the pipeline but never sent — the
# PUT goes out without a payload; presumably `$body | ht put ...` was intended
export def "script source" [] {
let body = $in
ht put (ht with-path "http://localhost:8230" "/source") --bearer (sc provide-access-token)
}
# completion helper: available actuator endpoint names
def actuators-names [] {
actuators | columns
}
# completion helper: available metric names
def metrics-names [] {
actuator metrics | get names
}
# completion helper: service external ids with the service name as description
# NOTE(review): calls `xs list-service` — confirm this module-qualified name
# resolves from within the xs module itself
export def comp-service-ids [] {
xs list-service | select externalId name | rename value description
}
# delete a single service by its external id
export def "delete service" [service_id:string@comp-service-ids] {
{id: $service_id} | to csv | ^xs delete-service
}
# fetch one actuator endpoint by name (href taken from the actuator index)
export def actuator [name:string@actuators-names] {
http get (actuators | get $name | get href) | from json
}
# fetch a single actuator metric by name
export def metrics [name:string@metrics-names] {
http get (ht with-path $env.XS_MANAGEMENT_URL $"/actuator/metrics/($name)") | from json
}
# create services from the pipeline input (table with at least a `path` column)
# NOTE(review): `pattern` and `--clean` are declared but never used in the body
export def batch-create-service [pattern?:string, --clean] {
$in | to csv | ^xs batch-create-service | from csv
}
# delete all services of the tenant
export def delete-all-service [] {
^xs delete-all-service
}
# fetch the proposed fields as structured data
export def "get proposed fields" [] {
^xs proposed-fields | from json
}
# download the analyse results to file
# NOTE(review): sibling `get analyse images` makes the id optional and falls
# back to pipeline input — here the id is mandatory; confirm the asymmetry
export def "get analyse result" [
analyse_task_id:string@comp-analyse-task-id
] {
collect-task-ids $analyse_task_id | to csv | ^xs download-analyse-result
}
# download the images from analyse result (by id, or for the piped-in tasks)
export def "get analyse images" [analyse_task_id?:string@comp-analyse-task-id] {
collect-task-ids $analyse_task_id | to csv | ^xs download-analyse-images
}
# completion helper: ids of the existing analyse tasks
def comp-analyse-task-id [] {
list-analyse-tasks | get id
}
# normalize to a table of task ids: use the explicit id when one is given,
# otherwise take the ids of the piped-in task table
def collect-task-ids [task_id?:string] {
    let tasks = ($in)
    if ($task_id == null) {
        $tasks | select id
    } else {
        [{id: $task_id}]
    }
}
# merge the flattened proposed fields by name, joining their values with spaces
# NOTE(review): the `$row |` in front of the record literal appears to have no
# effect — the record is built solely from the expressions on the right
export def "group proposed fields" [] {
get fields | flatten | flatten | select name type value | group-by name | values | each { |row| $row | { name: ($row | first | get name) , type: ($row | first | get type), value: ($row | get value | str join ' ') } } | sort-by name
}
# delete the analyse tasks given as pipeline input (table with an id column)
export def delete-analyse-tasks [] {
to csv | ^xs delete-analyse-tasks
}
# delete every analyse task of the tenant
export def delete-all-analyse-tasks [] {
list-analyse-tasks | delete-analyse-tasks
}
# triggers a re-analyse of a given task (or of all piped-in tasks)
export def restart [task_id?:string@comp-analyse-task-id] {
collect-task-ids $task_id | to csv | ^xs restart
}
# list image/pdf files below the current directory (used for completion)
def find-images [] {
fd -Ie pdf -e jpg -e tif -e tiff -e jpeg | lines
}
# creates a new analyse task for the provided images
export def analyse [
    ...patterns:string@find-images, # a list of paths or glob pattern for your images (jpg,pdf,tiff)
    --squash # if set to true create a single analyse task for all images
] {
    # expand every pattern and flatten the per-pattern path lists;
    # `flatten` replaces the manual `reduce -f [] {...append...}` fold
    let $paths = ($patterns
        | each {|p| glob $p }
        | flatten
        | each {|p| {path: $p}}
    )
    if $squash {
        # one multi-part task covering all images
        $paths | to csv | ^xs multi-part-analyse | from csv | insert paths ($paths | get path)
    } else {
        # one task per image
        $paths | batch-analyse
    }
}
# create a single service from one image file
export def service [path:string@find-images] {
{path:$path} | batch-create-service
}
# create services for files matching `pattern` (or for the piped-in file list),
# optionally filtered (-f), randomly sampled down to --num, or only previewed
# with --dry-run (returns the final file list without creating anything)
export def create-services [pattern:string="*{pdf,jpg,jpeg,tif,tiff,png}", --num:int, --filter(-f):string,--dry-run] {
let input = $in
# prefer the piped-in list; otherwise expand the glob pattern
let files = (if ($input | is-empty) {
glob $pattern
} else {
$input
})
let filtered = (if $filter != null {
$files | find $filter
} else {
$files
})
# sample a random subset when --num is given
let final = (if ($num != null) {
$filtered | shuffle | take $num
} else {
$filtered
})
if $dry_run {
return $final
}
# service name = file stem
$final | each { |f| {path: $f, name: ($f | path parse | get stem)} } | batch-create-service
}
# run the analyse batch for the piped-in path table, with concurrency and rate limits
export def batch-analyse [--concurrency_limit(-c):int=16, --rate_limit(-r):string="100/1sec"] {
to csv | ^xs -c $concurrency_limit -r $rate_limit batch-analyse | from csv
}
# parse a timestamp of the form "<time>[<zone>]" (e.g. java ZonedDateTime
# output) into a nushell datetime; non-UTC zones are parsed the same way
# but produce a warning on stderr.
# fixed: warning message typo "unkown" -> "unknown"; the duplicated
# return in both branches is collapsed into one.
def "parse-date" [] {
    let parsed = ($in | parse "{time}[{zone}]" | first)
    if $parsed.zone != "UTC" {
        # NOTE(review): the zone is not applied — the bare time portion is
        # interpreted directly, exactly as the original did
        print -e $"WARN: unknown time zone ($parsed.zone)"
    }
    $parsed.time | into datetime
}
# list all services of the tenant
export def "list-service" [] {
^xs list-service | from json
}
# shorthand aliases for the most common analyse-task commands
export alias la = list-analyse-tasks
export alias da = delete-analyse-tasks
export alias a = analyse
# list analyse tasks with parsed timestamps and a duration_secs column
# NOTE(review): the closures declare two params (`row index`) but update/insert
# pass only one argument — confirm this parses on the targeted nushell version
export def "list-analyse-tasks" [] {
^xs list-analyse-tasks | from csv
| reject "tenant"
| update createdAt {|row index| $row.createdAt | into datetime }
| update modifiedAt {|row index| $row.modifiedAt | into datetime }
| insert duration_secs {|row index | ($row.modifiedAt - $row.createdAt) / 1sec }
}