From a5a7812fc8cffbcc809ab66d7b68127782b0c3a4 Mon Sep 17 00:00:00 2001
From: Kinch
Date: Thu, 7 Dec 2023 10:08:50 +0100
Subject: [PATCH] init

---
 address.nu                |   6 ++
 ar.nu                     |   3 +
 avocado-env.nu            |   7 ++
 aws-prod-env.nu           |  13 +++
 aws-test-env.nu           |  14 +++
 aws.nu                    |  66 ++++++++++++++
 backup.nu                 |   7 ++
 bd.nu                     |   3 +
 bm.nu                     |  29 +++++++
 br.nu                     |  45 ++++++++++
 cert.nu                   |   5 ++
 chores.nu                 |   3 +
 completion.nu             |  37 ++++++++
 corpus.nu                 |  38 ++++++++
 credm.nu                  |  18 ++++
 dev-env.nu                |  15 ++++
 dev-tools.nu              |  13 +++
 dk.nu                     |  57 ++++++++++++
 docker-repo.nu            |   4 +
 duckling.nu               |  18 ++++
 env-paul.nu               |   5 ++
 filestore.nu              |  34 ++++++++
 flair.nu                  |  14 +++
 fs.nu                     |  30 +++++++
 gdl.nu                    |   5 ++
 gradle.nu                 |   3 +
 health.nu                 |  76 ++++++++++++++
 hl.nu                     |  76 ++++++++++++++
 ht.nu                     |  20 +++++
 img.nu                    |  11 +++
 it.nu                     |  11 +++
 job.nu                    |  51 +++++++++++
 jwt.nu                    |  11 +++
 k9s.nu                    |   3 +
 kafka.nu                  |  23 +++++
 kube.nu                   | 110 ++++++++++++++++++++++++
 local-env.nu              |  11 +++
 lyssa-env.nu              |   8 ++
 maven-dev.xml             |   7 ++
 mitm.nu                   |   7 ++
 ms-form.nu                |  19 ++++
 ms-ocr.nu                 |  58 +++++++++++++
 net.nu                    |   2 +
 nxs/mod.nu                |   2 +
 nxs/regression.nu         |   6 ++
 nxs/remote.nu             |   9 ++
 nxs/settings.nu           |  63 ++++++++++++++
 nxs/traces.nu             |  13 +++
 progress.nu               |  33 +++++++
 properties.nu             |  11 +++
 regression.nu             |  46 ++++++++++
 s3.nu                     |  48 +++++++++++
 sc.nu                     | 132 ++++++++++++++++++++++++++++
 user-se-admin-aws-test.nu |   7 ++
 user-se-api-aws-test.nu   |   7 ++
 xs-deploy.nu              | 163 +++++++++++++++++++++++++++++++++++
 xs.nu                     | 176 ++++++++++++++++++++++++++++++++++++++
 57 files changed, 1712 insertions(+)
 create mode 100644 address.nu
 create mode 100644 ar.nu
 create mode 100644 avocado-env.nu
 create mode 100644 aws-prod-env.nu
 create mode 100644 aws-test-env.nu
 create mode 100644 aws.nu
 create mode 100644 backup.nu
 create mode 100644 bd.nu
 create mode 100644 bm.nu
 create mode 100644 br.nu
 create mode 100644 cert.nu
 create mode 100644 chores.nu
 create mode 100644 completion.nu
 create mode 100644 corpus.nu
 create mode 100644 credm.nu
 create mode 100644 dev-env.nu
 create mode 100644 dev-tools.nu
 create mode 100644 dk.nu
 create mode 100644 docker-repo.nu
 create mode 100644 duckling.nu
 create mode 100644 env-paul.nu
 create mode 100644 filestore.nu
 create mode 100644 flair.nu
 create mode 100644 fs.nu
 create mode 100644 gdl.nu
 create mode 100644 gradle.nu
 create mode 100644 health.nu
 create mode 100644 hl.nu
 create mode 100644 ht.nu
 create mode 100644 img.nu
 create mode 100644 it.nu
 create mode 100644 job.nu
 create mode 100644 jwt.nu
 create mode 100644 k9s.nu
 create mode 100644 kafka.nu
 create mode 100644 kube.nu
 create mode 100644 local-env.nu
 create mode 100644 lyssa-env.nu
 create mode 100644 maven-dev.xml
 create mode 100644 mitm.nu
 create mode 100644 ms-form.nu
 create mode 100644 ms-ocr.nu
 create mode 100644 net.nu
 create mode 100644 nxs/mod.nu
 create mode 100644 nxs/regression.nu
 create mode 100644 nxs/remote.nu
 create mode 100644 nxs/settings.nu
 create mode 100644 nxs/traces.nu
 create mode 100644 progress.nu
 create mode 100644 properties.nu
 create mode 100644 regression.nu
 create mode 100644 s3.nu
 create mode 100644 sc.nu
 create mode 100644 user-se-admin-aws-test.nu
 create mode 100644 user-se-api-aws-test.nu
 create mode 100644 xs-deploy.nu
 create mode 100644 xs.nu

diff --git a/address.nu b/address.nu
new file mode 100644
index 0000000..e1c5bb0
--- /dev/null
+++ b/address.nu
@@ -0,0 +1,6 @@
+export def main [...query:string] {
+    let query = $query | str join " "
+    http get $"http://lyssa:8888/search?q='($query)'&addressdetails=1"
+    | reject licence boundingbox
+    #| first | get address
+}
diff --git a/ar.nu b/ar.nu
new file mode 100644
index 0000000..f790e5f
--- /dev/null
+++ b/ar.nu
@@ -0,0 +1,3 @@
+def ls [path:string] {
+    als $path
+}
diff --git a/avocado-env.nu b/avocado-env.nu
new file mode 100644
index 0000000..e2b422f
--- /dev/null
+++ b/avocado-env.nu
@@ -0,0 +1,7 @@
+export-env {
+    load-env {
+        KUBE_CONTEXT: "avocado"
+        HELM_KUBECONTEXT: "avocado"
+        XS_ENV_NAME: "AVOCADO(Incomplete)"
+    }
+}
\ No newline at end of file
diff --git a/aws-prod-env.nu b/aws-prod-env.nu
new file mode 100644
index 0000000..ba92749
--- /dev/null
+++ b/aws-prod-env.nu
@@ -0,0 +1,13 @@
+export-env {
+    load-env {
+        MS_OCR_AUTHORITY: "https://otc-dev.aws.de.insiders.cloud/ocr/microsoft",
+        FLAIR_AUTHORITY: "http://localhost:8082",
+        XS_SF_URL: "https://api.production.de.insiders.cloud",
+        XS_URL: "https://xs-backend.production.de.insiders.cloud",
+        XS_SF_ACCOUNT: "xs.prod.paul",
+        KUBE_CONTEXT: "aws-prod"
+        HELM_KUBECONTEXT: "aws-prod"
+        XS_ENV_NAME: "AWS_PROD"
+        AWS_INFRASTRUCTURE_ACCOUNT: "047349208615.dkr.ecr.eu-central-1.amazonaws.com"
+    }
+}
\ No newline at end of file
diff --git a/aws-test-env.nu b/aws-test-env.nu
new file mode 100644
index 0000000..12ad1e1
--- /dev/null
+++ b/aws-test-env.nu
@@ -0,0 +1,14 @@
+export-env {
+    load-env {
+        MS_OCR_AUTHORITY: "https://otc-dev.aws.de.insiders.cloud/ocr/microsoft",
+        FLAIR_AUTHORITY: "http://localhost:8082",
+        XS_SF_URL: "https://api.test.de.insiders.cloud",
+        XS_URL: "https://xs-backend.test.de.insiders.cloud",
+        XS_SF_ACCOUNT: "xs.aws.test.tessi-admin",
+        KUBE_CONTEXT: "dev-test"
+        HELM_KUBECONTEXT: "dev-test"
+        XS_ENV_NAME: "AWS_TEST"
+        AWS_INFRASTRUCTURE_ACCOUNT: "047349208615.dkr.ecr.eu-central-1.amazonaws.com"
+        XS_KAFKA_BOOTSTRAP_SERVER: "localhost:9092"
+    }
+}
\ No newline at end of file
diff --git a/aws.nu b/aws.nu
new file mode 100644
index 0000000..c132dc1
--- /dev/null
+++ b/aws.nu
@@ -0,0 +1,66 @@
+def call_aws [args:list] {
+    run-external --redirect-stdout "aws" $args
+}
+
+def comp-instanceId [] {
+    instances | each {|inst|
+        {
+            value: $inst.InstanceId,
+            description: $inst.InstanceType
+        }
+    }
+}
+
+export def login-into-docker [] {
+    ^aws ecr get-login-password --region eu-central-1 | ^docker login --username AWS --password-stdin $env.AWS_INFRASTRUCTURE_ACCOUNT
+}
+
+export def login [] {
+    call_aws ["sso", "login"]
+    login-into-docker
+}
+
+export def "restart instance" [instanceId:string@comp-instanceId] {
+    call_aws ["ec2", "reboot-instances", "--instance-ids", $instanceId]
+}
+
+export def "stop instance" [instanceId:string@comp-instanceId] {
+    call_aws ["ec2", "stop-instances", "--instance-ids", $instanceId]
+}
+
+export def "start instance" [instanceId:string@comp-instanceId] {
+    call_aws ["ec2", "start-instances", "--instance-ids", $instanceId]
+}
+
+export def "list alarm" [] {
+    call_aws [cloudwatch describe-alarms] | from json | get MetricAlarms | select -i AlarmName AlarmDescription | rename name description
+}
+
+def comp-alarm-name [] {
+    list alarm | get name
+}
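+
+# e.g. (a sketch; assumes an active SSO session, the instance id is illustrative):
+#   login
+#   instances | where State == "running" | select InstanceId InstanceType
+#   stop instance i-0123456789abcdef0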
"Europe/Berlin" | date format } + | where new == ALARM + | reverse +} + +# list all ec2 instances +export def instances [] { + call_aws ["ec2", "describe-instances"] | from json + | get Reservations | get Instances | flatten + | select -i ImageId InstanceId InstanceType Architecture State KeyName PrivateIpAddress PublicIpAddress PrivateDnsName PublicDnsName Tags + | update State { |row| $row.State.Name } +} + diff --git a/backup.nu b/backup.nu new file mode 100644 index 0000000..d23f3a9 --- /dev/null +++ b/backup.nu @@ -0,0 +1,7 @@ +def call_restic [] { + run-external "restic" "-r" "/Volumes/data/backup" "backup" ~/.ssh ~/. +} + +export def main [] { + call_restic +} \ No newline at end of file diff --git a/bd.nu b/bd.nu new file mode 100644 index 0000000..0fb5c14 --- /dev/null +++ b/bd.nu @@ -0,0 +1,3 @@ +export def compare-tables [other] { + +} \ No newline at end of file diff --git a/bm.nu b/bm.nu new file mode 100644 index 0000000..4a0edff --- /dev/null +++ b/bm.nu @@ -0,0 +1,29 @@ +export def load [] { + if ("~/.nu_bookmarks.csv" | path exists) { + open ~/.nu_bookmarks.csv + } else { + [] + } +} + +export def get-completion [] { + load | rename description value +} + +def save-bookmarks [] { + uniq | save -f "~/.nu_bookmarks.csv" +} + +export def add [] { + load | append {path: (pwd), name: (pwd | path basename)} | save-bookmarks +} + +export def find [name:string@get-completion] { + load | where name == $name | last | get path +} + +export def-env main [name:string@get-completion] { + let dest = (load | where name == $name | last | get path) + cd $dest +} + diff --git a/br.nu b/br.nu new file mode 100644 index 0000000..958ee08 --- /dev/null +++ b/br.nu @@ -0,0 +1,45 @@ +def settings-db [] { + $env.HOME | path join ".br.db" +} + +def read-settings [] { + open (settings-db) +} + +def comp-name [] { + read-settings | query db "SELECT * FROM formula" | select name desc | rename value description +} + +# update the info about the formulars +export def update-cache [] { + let formulars = (http get https://formulae.brew.sh/api/formula.json) + let casks = (http get https://formulae.brew.sh/api/cask.json) + rm -f (settings-db) + $formulars | select name full_name desc | insert type "formula" | into sqlite --table_name "formula" (settings-db) + $casks | select token full_token desc | rename name full_name desc | insert type "cask" | into sqlite --table_name "formula" (settings-db) +} + +export def search [pattern:string,--exact] { + let settings = (read-settings) + if $exact { + $settings | query db $"SELECT * FROM formula WHERE name = '($pattern)'" + } else { + $settings | query db $"SELECT * FROM formula WHERE name LIKE '%($pattern)%'" + } +} + +export def install [name:string@comp-name] { + ^brew install $name +} + +# list all installed apps +export def "list installed" [] { + ["Caskroom", "Cellar"] | each { |x| + ls ("/opt/homebrew" | path join $x) | get name | each {|x| $x | path basename} + } | flatten +} + +export def up [] { + ^brew update + ^brew upgrade +} \ No newline at end of file diff --git a/cert.nu b/cert.nu new file mode 100644 index 0000000..17241db --- /dev/null +++ b/cert.nu @@ -0,0 +1,5 @@ +# converts an PEM (PKS8) private key to p12 keystore +export def "rsa private key to p12" [pem:path] { + let destination = ($pem | path parse | update extension p12 | path join) + ^openssl pkcs12 -export -nocerts -inkey $pem -out $destination +} \ No newline at end of file diff --git a/chores.nu b/chores.nu new file mode 100644 index 0000000..129dfd4 --- /dev/null +++ b/chores.nu @@ 
@@ -0,0 +1,3 @@
+export def "build-nodes size" [] {
+
+}
\ No newline at end of file
diff --git a/completion.nu b/completion.nu
new file mode 100644
index 0000000..2910c13
--- /dev/null
+++ b/completion.nu
@@ -0,0 +1,37 @@
+export def run [...x: any] {
+    let script = $"($env.PWD)/.nu"
+    nu $script ...$x
+}
+
+export def "nu-complete just recipes" [] {
+    ^just --unstable --unsorted --dump --dump-format json
+    | from json
+    | get recipes
+    | transpose k v
+    | each {|x|
+        {
+            value: $x.k,
+            description: ($x.v.parameters
+                | each {|y| $y.name}
+                | str join ' '
+            )
+        }
+    }
+}
+
+export def "nu-complete just args" [context: string, offset: int] {
+    let r = ($context | split row ' ')
+    ^just --unstable -u --dump --dump-format json
+    | from json
+    | get recipes
+    | get ($r.1)
+    | get body
+    | each {|x| {description: ($x | get 0) }}
+    | prepend ''
+}
+
+export extern "just" [
+    recipes?: string@"nu-complete just recipes"
+    ...args: any@"nu-complete just args"
+]
\ No newline at end of file
diff --git a/corpus.nu b/corpus.nu
new file mode 100644
index 0000000..318d4ab
--- /dev/null
+++ b/corpus.nu
@@ -0,0 +1,38 @@
+def settings-db [] {
+    $env.HOME | path join ".corpus.db"
+}
+
+def save-settings [] {
+    into sqlite (settings-db) --table_name "documents"
+}
+
+def query-settings [query] {
+    open (settings-db) | query db $query
+}
+
+def comp-corpus [] {
+    list | get corpus
+}
+
+# add documents to a corpus
+export def "add documents" [corpus:string, pattern:string] {
+    glob $pattern | each {|x| {path: $x, corpus: $corpus}} | save-settings
+}
+
+# list all corpora
+export def list [] {
+    query-settings "SELECT DISTINCT corpus FROM documents"
+}
+
+# list all documents of a corpus
+export def "show" [corpus:string@comp-corpus] {
+    query-settings $"SELECT DISTINCT path FROM documents WHERE corpus = '($corpus)'"
+}
+
+# choose randomly a file from corpus
+export def choose [corpus:string@comp-corpus, --amount:int=1] {
+    let documents = (show $corpus)
+    seq 1 $amount | each {
+        $documents | shuffle | first
+    }
+}
\ No newline at end of file
diff --git a/credm.nu b/credm.nu
new file mode 100644
index 0000000..b4a344e
--- /dev/null
+++ b/credm.nu
@@ -0,0 +1,18 @@
+export def entries [] {
+    ^credm list-entries | from json
+}
+
+export def tokens [] {
+    ^credm list-tokens | from json
+}
+
+export def get [account:string@entries] {
+    ^credm get $account | from json
+}
+
+export def update-token-response [account:string@entries] {
+    let token_response = $in
+    let expires_in = ($token_response.validUntil | into datetime) - (date now)
+    let expires_in_secs = ($expires_in / 1sec | math floor)
+    $token_response.token | ^credm set-token $account $"($expires_in_secs)s"
+}
\ No newline at end of file
diff --git a/dev-env.nu b/dev-env.nu
new file mode 100644
index 0000000..462301c
--- /dev/null
+++ b/dev-env.nu
@@ -0,0 +1,15 @@
+export-env {
+    load-env {
+        MS_OCR_AUTHORITY: "http://10.54.150.152:5000",
+        DUCKLING_AUTHORITY: "http://duckling.xs.insiders.zz"
+        FLAIR_AUTHORITY: "https://flair-xs.dev.insiders.cloud",
+        XS_SF_URL: "https://api.internal.insiders.cloud",
+        XS_SF_ACCOUNT: "xs.dev.klara",
+        XS_URL: "https://backend-xs.dev.insiders.cloud",
+        XS_MANAGEMENT_URL: "https://management-xs.dev.insiders.cloud",
+        KUBE_CONTEXT: "k8s-xs"
+        HELM_KUBECONTEXT: "k8s-xs"
+        XS_ENV_NAME: "DEV"
+        XS_KAFKA_BOOTSTRAP_SERVER: "localhost:9092"
+    }
+}
\ No newline at end of file
diff --git a/dev-tools.nu b/dev-tools.nu
new file mode 100644
index 0000000..0224ea0
--- /dev/null
+++ b/dev-tools.nu
@@ -0,0 +1,13 @@
+# parses an xml string into a maven dependency artifact
"from maven-dep" [] { + get content + | each { |tag| { tag: $tag.tag, value: ($tag.content | get content | first ) } } + | reduce -f {} {|it, acc| $acc | upsert $it.tag $it.value } + +} + +# construct a gradle dependency from a dependency artifact +export def "to gradle-dep" [dep_type:string] { + let dep = $in + $"($dep_type)\(\"($dep.groupId):($dep.artifactId):($dep.version)\"\)" +} diff --git a/dk.nu b/dk.nu new file mode 100644 index 0000000..2143af6 --- /dev/null +++ b/dk.nu @@ -0,0 +1,57 @@ +def parse-time [] { + parse "{value} {unit} ago" | each {|x| + match $x.unit { + "hours" => "hr", + "days" => "d" + } + } +} + +# list images. The result of this can be piped into the most other commands for dk +export def images [] { + ^docker images --format "{{json . }}" + | from json --objects + | select CreatedSince Repository ID Size Tag + | each {|row| + let splitted_repo = ($row.Repository | split row '/' -n 2) + { + created-since: $row.CreatedSince, + host: ($splitted_repo | first), + tag: ($splitted_repo | last), + version: $row.Tag, + size: ($row.Size | into filesize) + id: $row.ID + } + } +} + +# like "tag" but based on image specs +export def "batch tag" [call] { + each {|image_spec| + let old = $image_spec + let new = (do $call $image_spec) + {old: $old, new: $new} + } | each {|x| + tag ($x.old | to docker path) ($x.new | to docker path) + $x.new + } +} + +# "rename" a docker image +export def tag [old:string, new:string] { + ^docker tag $old $new +} + +export def "push" [] { + each {|image_spec| + ^docker push ($image_spec | to docker path) + } +} + +# == implementation == + + +def "to docker path" [] { + let image_spec = ($in) + $"($image_spec.host)/($image_spec.tag):($image_spec.version)" +} diff --git a/docker-repo.nu b/docker-repo.nu new file mode 100644 index 0000000..a1c6826 --- /dev/null +++ b/docker-repo.nu @@ -0,0 +1,4 @@ +# list all images +export def list [] { + dkreg xtractionstudio-docker.insiders-technologies.de tree | lines | parse "{host}/{tag}:{version}" +} \ No newline at end of file diff --git a/duckling.nu b/duckling.nu new file mode 100644 index 0000000..3f14f18 --- /dev/null +++ b/duckling.nu @@ -0,0 +1,18 @@ +use ~/bin/nu_scripts/ht.nu + +export def supported-languages [] { + [ + "de_DE", + "en_GB", + "en_US", + "en_ES", + "fr_FR", + "it_IT", + "ja_JA", + ] +} + +export def main [--language(-l):string@supported-languages="de_DE"] { + let text = $in + ^xh POST (ht with-path $env.DUCKLING_AUTHORITY "parse") --form $"locale=($language)" 'dims=["time", "email", "url"]' $"text=($text)" | from json +} \ No newline at end of file diff --git a/env-paul.nu b/env-paul.nu new file mode 100644 index 0000000..1fbadcd --- /dev/null +++ b/env-paul.nu @@ -0,0 +1,5 @@ +export-env { + load-env { + XS_SF_ACCOUNT: "xs.dev.paul", + } +} \ No newline at end of file diff --git a/filestore.nu b/filestore.nu new file mode 100644 index 0000000..79f14c7 --- /dev/null +++ b/filestore.nu @@ -0,0 +1,34 @@ +use std log + +# upload documents to filestore and returns the location +export def put [glob:string, + --product:string="xs", + --tenantId:string="0000016", + ] { + expand-glob $glob + | par-each { |x| + let location = (put-single $x.name $x.type (random uuid) $product $tenantId) + { + file: $x.name, + location: $location + } + } | flatten +} + +def put-single [ + document:path, + content_type:string, + container_id:string, + product:string, + tenantId:string, + ] { + log debug $"Uploading file ($document) of type ($content_type)" + let filename = ($document | path basename) + http post 
-H ["Content-Type", $content_type] $"http://localhost:8300/api/v1/($product)/($tenantId)/($container_id)/($filename)" $document +} + + +def expand-glob [glob:string] { + glob -D $glob + | each {|x| ls --mime-type $x | first} +} \ No newline at end of file diff --git a/flair.nu b/flair.nu new file mode 100644 index 0000000..6ffd685 --- /dev/null +++ b/flair.nu @@ -0,0 +1,14 @@ +export def main [--authority:string] { + let input = $in + let host = $env.FLAIR_AUTHORITY + $input | each {|text| + http post -t application/json $"($host)/tags?limit=1" { text: $"($text)." } + } | flatten | get tokens +} + +export def batch [file_to_text_fn] { + each {|ocr_path| + print $"flair processing of ($ocr_path | path basename)" + {filename: $ocr_path} | insert flair (($ocr_path | do $file_to_text_fn) | main --authority $env.FLAIR_AUTHORITY) + } +} \ No newline at end of file diff --git a/fs.nu b/fs.nu new file mode 100644 index 0000000..ed788c3 --- /dev/null +++ b/fs.nu @@ -0,0 +1,30 @@ +# batch rename a list as path +export def rename [renamer,--dry-run] { + let paths = ($in) + let new_paths = ($paths | each { | old_path | $old_path | path parse } + | do $renamer) + let plan = $paths | zip $new_paths + + if ($dry_run) { + return $plan + } + $plan | batch rename +} + +# in: list> and rename first to second for every list. +export def "batch rename" [] { + each { | row | + mkdir ($row.1 | path parse | get parent) + mv $row.0 $row.1 -i + } +} + +# set the parent dir of a list +export def "append dir" [new_dir:string] { + update parent {|p| $p.parent | path join $new_dir } +} + +# set the filename of a list +export def "set filename" [new_name:string] { + each { |p| $p.parent | path join $new_name } +} \ No newline at end of file diff --git a/gdl.nu b/gdl.nu new file mode 100644 index 0000000..2b72a52 --- /dev/null +++ b/gdl.nu @@ -0,0 +1,5 @@ +def determine-java-home [] { + /usr/libexec/java_home | lines | first +} + +export alias main = ./gradlew \ No newline at end of file diff --git a/gradle.nu b/gradle.nu new file mode 100644 index 0000000..0d81260 --- /dev/null +++ b/gradle.nu @@ -0,0 +1,3 @@ +export def release-info [] { + http get "https://services.gradle.org/versions/current" +} \ No newline at end of file diff --git a/health.nu b/health.nu new file mode 100644 index 0000000..0006ee3 --- /dev/null +++ b/health.nu @@ -0,0 +1,76 @@ + +use sc.nu +use xs.nu +use bm.nu +use progress.nu +use std assert + +def "assert record contains" [record, key:string, msg:string] { + assert (($record | get -i $key | is-empty) == false) $msg --error-label { + start: (metadata $record).span.start, + end: (metadata $record).span.end, + text: $"the record does not contains the key ($key)", + } +} + +def "not" [value:bool] { + return !($value) +} + +def "assert is not empty" [list, msg:string] { + assert (not ($list | is-empty)) $msg --error-label { + start: (metadata $list).span.start, + end: (metadata $list).span.end, + text: $"the list is not empty but it is", + } + +} + +def test [name: string, call] { + let accum = $in + print $"Executing: ($name)..." 
+    let next = (try {
+        let result = ($accum | do $call ($accum | last | get -i result | default {}))
+        print "PASS"
+        {
+            name: $name,
+            status: PASS,
+            result: $result,
+        }
+    } catch {|e|
+        {
+            name: $name,
+            status: FAIL,
+            error: $e.msg,
+        }
+    })
+    $accum | append $next
+}
+
+export def main [] {
+    [{}]
+    | test "can login into smart cloud" {|_|
+        let response = (sc request-token)
+        assert record contains $response "token" "Token response does not contain a token"
+    }
+    | test "can create a service" {|_|
+        xs service ([(bm find xs-reg), "00001.jpg"] | path join) | last
+    }
+    | test "service is in pending state" {|new_service|
+        print $new_service.externalId
+        let services = (xs list-service | where externalId == $new_service.externalId)
+        assert is not empty $services $"the newly created service ($new_service | select name externalId) should be listed"
+        $new_service
+    }
+    | test "service will be in success state eventually" {|new_service|
+        assert (progress wait-until {
+            print $"check status for ($new_service.externalId)"
+            let status = (xs list-service | where externalId == $new_service.externalId | last | get -i status | default "")
+            print $"Has status ($status)"
+            $status == "SUCCESS"
+        } --step-time 1sec --overall 20sec
+        )
+        $new_service
+    }
+}
\ No newline at end of file
diff --git a/hl.nu b/hl.nu
new file mode 100644
index 0000000..ba33d92
--- /dev/null
+++ b/hl.nu
@@ -0,0 +1,76 @@
+export def helm-projects [] {
+    [{
+        value: "xs-backend",
+        description: "/Volumes/extreme/projects/xtraction-factory/XtractionStudioBackend/src/main/helm/xs-backend"
+    }]
+}
+
+export def hist [project:string@helm-projects] {
+    let helm_project_path = (helm-projects | where value == $project | first | get description | parse-helm-path)
+    cd $helm_project_path.directory
+    helm history ($helm_project_path.name) -o json | from json | update updated {|row| $row.updated | into datetime }
+}
+
+export def rollback [project:string@helm-projects, revision?:int] {
+    let helm_project_path = (helm-projects | where value == $project | first | get description | parse-helm-path)
+    cd $helm_project_path.directory
+    if $revision == null {
+        helm rollback ($helm_project_path.name)
+    } else {
+        helm rollback ($helm_project_path.name) $revision
+    }
+}
+
+# render a file from a helm chart.
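+# e.g. (a sketch; the template and values file names are illustrative):
+#   render xs-backend deployment.yaml values-dev.yaml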
+export def render [
+    project:string@helm-projects, # the project name
+    path:string@comp-template-files, # the template file relative to the templates folder
+    values:string@comp-values-files, # the value file in addition to the values.yaml which will always be used
+] {
+    let helm_project_path = ($project | helm-project-directory)
+    ^helm template ($helm_project_path) -s ("templates" | path join $path) -f ($helm_project_path | path join $values) --debug
+}
+
+# == completion
+
+export def comp-template-files [context: string] {
+    let project_path = ($context | split shell words | filter project-names | first | helm-project-directory)
+    let template_path = ([$project_path, "templates"] | path join)
+    glob $"($project_path)/templates/*.{yaml,yml}" | each {|template_file|
+        $template_file | path relative-to $template_path
+    }
+}
+
+def comp-values-files [context: string] {
+    let project_path = ($context | split shell words | filter project-names | first | helm-project-directory)
+    glob $"($project_path)/values-*.{yaml,yml}" | each {|template_file|
+        $template_file | path relative-to $project_path
+    }
+}
+
+# == implementation
+
+# like split words but split only on spaces (ignoring multiple spaces)
+def "split shell words" [] {
+    split row " " | filter {|x| ($x | is-empty) == false }
+}
+
+# filter words for valid project names
+def "filter project-names" [] {
+    let words = $in
+    let project_names = (helm-projects | each {|x| $x.value})
+    $words | filter {|word| $word in $project_names}
+}
+
+def parse-helm-path [] {
+    let helm_path = $in
+    {
+        name: ($helm_path | path basename),
+        directory: ($helm_path | path dirname)
+    }
+}
+
+def helm-project-directory [] {
+    let project_name = $in
+    helm-projects | where value == $project_name | first | get description
+}
\ No newline at end of file
diff --git a/ht.nu b/ht.nu
new file mode 100644
index 0000000..9a1ec06
--- /dev/null
+++ b/ht.nu
@@ -0,0 +1,20 @@
+export def get [url:string, --bearer:string] {
+    if ($bearer | is-empty) {
+        xh $url
+    } else {
+        print $url
+        http get -fe -H ["Authorization", $"bearer ($bearer)"] $url
+    }
+}
+
+export def post [url:string] {
+    xh POST $url
+}
+
+export def put [url:string, --bearer:string] {
+    xh PUT -A bearer -a $"($bearer)" $url
+}
+
+export def with-path [authority:string, path:string] {
+    $authority | url parse | update path $path | url join | str trim -c '?'
+}
\ No newline at end of file
diff --git a/img.nu b/img.nu
new file mode 100644
index 0000000..e01715f
--- /dev/null
+++ b/img.nu
@@ -0,0 +1,11 @@
+def comp-to-png [] {
+    glob *.pdf | each {|x| $x | path relative-to $"(pwd)" }
+}
+
+# convert (a pdf) to png
+export def "to png" [pattern:string@comp-to-png] {
+    glob $pattern | each {|pdf|
+        let dest = ($pdf | path parse | update extension png | path join)
+        ^convert -density 300 $pdf $dest
+        $dest
+    }
+}
\ No newline at end of file
diff --git a/it.nu b/it.nu
new file mode 100644
index 0000000..d9d334a
--- /dev/null
+++ b/it.nu
@@ -0,0 +1,11 @@
+# zip two lists producing a record where the 'key' list are the keys.
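+#
+# e.g.: [1 2] | zip-keys [a b]   # => {a: 1, b: 2}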
+export def "zip-keys" [ # -> list + keys: list + ] { + zip $keys | reduce -f {} { |it, acc| $acc | upsert $it.1 $it.0 } +} + +# split a table into equally sized chunks +export def chunk [chunk_size:int] { + window $chunk_size --stride $chunk_size -r +} \ No newline at end of file diff --git a/job.nu b/job.nu new file mode 100644 index 0000000..f2627f8 --- /dev/null +++ b/job.nu @@ -0,0 +1,51 @@ +# spawn task to run in the background +# +# please note that the it spawned a fresh nushell to execute the given command +# So it doesn't inherit current scope's variables, custom commands, alias definition, except env variables which value can convert to string. +# +# e.g: +# spawn { echo 3 } +export def spawn [ + command: block, # the command to spawn +] { + let config_path = $nu.config-path + let env_path = $nu.env-path + let source_code = (view source $command | str trim -l -c '{' | str trim -r -c '}') + let job_id = (pueue add -p $"nu --config \"($config_path)\" --env-config \"($env_path)\" -c '($source_code)'") + {"job_id": $job_id} +} + +export def log [ + id: int # id to fetch log +] { + pueue log $id -f --json + | from json + | transpose -i info + | flatten --all + | flatten --all + | flatten status +} + +# get job running status +export def status () { + pueue status --json + | from json + | get tasks + | transpose -i status + | flatten + | flatten status +} + +# kill specific job +export def kill (id: int) { + pueue kill $id +} + +# clean job log +export def clean () { + pueue clean +} + +export def follow [] { + pueue follow +} \ No newline at end of file diff --git a/jwt.nu b/jwt.nu new file mode 100644 index 0000000..2319cb8 --- /dev/null +++ b/jwt.nu @@ -0,0 +1,11 @@ +use ~/bin/nu_scripts/it.nu + +# parses a signed jwt token +export def parse [] { + let splitted = (split row '.') + { + header: ($splitted | get 0 | decode base64 -c url-safe-no-padding | from json), + payload: ($splitted | get 1 | decode base64 -c url-safe-no-padding | from json), + signature: ($splitted | get 2) + } +} \ No newline at end of file diff --git a/k9s.nu b/k9s.nu new file mode 100644 index 0000000..1a49a7c --- /dev/null +++ b/k9s.nu @@ -0,0 +1,3 @@ +export def main [] { + run-external "k9s" "--context" $env.KUBE_CONTEXT +} \ No newline at end of file diff --git a/kafka.nu b/kafka.nu new file mode 100644 index 0000000..f044773 --- /dev/null +++ b/kafka.nu @@ -0,0 +1,23 @@ +def call_kafka [args] { + run-external --redirect-stdout kafka-admin-cli $args +} + +# list all kafka topics on your broker +export def topics [] { + call_kafka list-topics | lines | sort +} + +# delete topics. Reads the topic names from plain stdin +export def "delete topics" [] { + $in | to text | call_kafka delete-topics +} + +# describe topics. Reads the topic names from plain stdin +export def "describe topics" [] { + $in | to text | call_kafka describe-topics +} + +# get the info for a list of topics. 
+export def "topic info" [] {
+    $in | to text | call_kafka topics-info | from json
+}
\ No newline at end of file
diff --git a/kube.nu b/kube.nu
new file mode 100644
index 0000000..b1290fe
--- /dev/null
+++ b/kube.nu
@@ -0,0 +1,110 @@
+export def list-pods [] {
+    kubectl --context $env.KUBE_CONTEXT get pods --output json | from json | get items
+}
+
+export def pods [--running(-r)] {
+    if $running {
+        containers | where ready | select pod | uniq
+    } else {
+        containers | get pod | uniq
+    }
+}
+
+export def apply [] {
+    kubectl --context $env.KUBE_CONTEXT apply -f -
+}
+
+export def containers [] {
+    list-pods | each {|item|
+        $item.status.containerStatuses | flatten | select image name restartCount ready
+        | insert pod ($item.metadata.name)
+        | insert creation ($item.metadata.creationTimestamp)
+        | insert nodeName ($item.spec.nodeName)
+    } | flatten
+}
+
+def list-images [] {
+    containers | get image | uniq
+}
+
+export def set-image [deployment:string@apps, image:string@list-images] {
+    kubectl --context $env.KUBE_CONTEXT set image $"deployment/($deployment)" $"($deployment)=($image)"
+}
+
+export def "del app-pods" [app:string@apps] {
+    kubectl --context $env.KUBE_CONTEXT delete pods -l $"app=($app)"
+}
+
+# list all app names
+export def apps [] {
+    containers | get name | uniq
+}
+
+# list all available deployments
+export def deployments [] {
+    call_kube ["get", "deployments"] | lines | parse "{name} {rest}" | skip | get name
+}
+
+def when [condition:bool, and_block] {
+    let stream = $in
+    if ($condition) {
+        $stream | do $and_block
+    } else {
+        $stream
+    }
+}
+
+def call_kube [args, --quiet] {
+    run-external --redirect-stdout "kubectl" "--context" $env.KUBE_CONTEXT $args
+}
+
+export def "pod logs" [...pods:string@pods] {
+    let args = [logs]
+}
+
+export def "deployment restart" [deployment:string@deployments] {
+    call_kube [rollout, restart, deployment, $deployment]
+}
+
+export def "deployment restart status" [deployment:string@deployments] {
+    call_kube [rollout, status, deployment, $deployment]
+}
+
+# show the logs of all containers with a certain app label
+export def "app logs" [
+    ...app:string@apps, # the name of app
+    --json (-j), # if set parse the log as json
+    --since:duration # how old the logs should be
+    --timestamps (-t),
+    --previous (-p),
+    --prefix,
+] {
+    let args = [logs]
+    let args = ($args | append $"-l app in \(($app | str join ',')\)")
+    let args = if $since != null { $args | append $"--since=($since / 1min)m" } else { $args }
+    let args = if $timestamps { $args | append "--timestamps" } else { $args }
+    let args = if $previous { $args | append "--previous" } else { $args }
+    let args = if $prefix { $args | append "--prefix" } else { $args }
+    call_kube $args
+    | when $json { from json --objects }
+}
+
+# List the logs for a deployment
+export def "deployment logs" [
+    deployment:string@deployments
+    --json (-j), # if set parse the log as json
+    --since:duration # how old the logs should be
+    --timestamps (-t),
+    --previous (-p),
+    --prefix,
+] {
+    let args = [logs]
+    let args = ($args | append $"deployment/($deployment)")
+    let args = if $since != null { $args | append $"--since=($since / 1min)m" } else { $args }
+    let args = if $timestamps { $args | append "--timestamps" } else { $args }
+    let args = if $previous { $args | append "--previous" } else { $args }
+    let args = if $prefix { $args | append "--prefix" } else { $args }
+    call_kube $args
+    | when $json { lines | filter {|line| $line | str starts-with '{'} | each {|line| $line | from json} }
+}
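+
+# e.g. (a sketch; the app label and the "level" field are illustrative):
+#   app logs backend -j --since 10min | where level == "ERROR"
\ No newline at end of file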
diff --git a/local-env.nu b/local-env.nu
new file mode 100644
index 0000000..eabd9b9
--- /dev/null
+++ b/local-env.nu
@@ -0,0 +1,11 @@
+export use ~/bin/nu_scripts/dev-env.nu
+
+export-env {
+    load-env {
+        XS_URL: "http://localhost:8080",
+        FLAIR_AUTHORITY: "http://lyssa:8082",
+        XS_MANAGEMENT_URL: "http://localhost:8081",
+        XS_ENV_NAME: "LOCAL"
+        XS_KAFKA_BOOTSTRAP_SERVER: "lyssa:9092"
+    }
+}
\ No newline at end of file
diff --git a/lyssa-env.nu b/lyssa-env.nu
new file mode 100644
index 0000000..8850e5b
--- /dev/null
+++ b/lyssa-env.nu
@@ -0,0 +1,8 @@
+
+export use ~/bin/nu_scripts/dev-env.nu
+
+export-env {
+    load-env {
+        FLAIR_AUTHORITY: "http://lyssa:8082",
+    }
+}
\ No newline at end of file
diff --git a/maven-dev.xml b/maven-dev.xml
new file mode 100644
index 0000000..46db86f
--- /dev/null
+++ b/maven-dev.xml
@@ -0,0 +1,7 @@
+<dependencies>
+    <dependency>
+        <groupId>org.apache-extras.beanshell</groupId>
+        <artifactId>bsh</artifactId>
+        <version>2.0b6</version>
+    </dependency>
+</dependencies>
\ No newline at end of file
diff --git a/mitm.nu b/mitm.nu
new file mode 100644
index 0000000..1e028c9
--- /dev/null
+++ b/mitm.nu
@@ -0,0 +1,7 @@
+export def "forward cloud" [] {
+    mitmproxy -p 7070 --mode $"reverse:($env.XS_SF_URL)"
+}
+
+export def "forward backend" [] {
+    mitmproxy -p 7071 --mode $"reverse:($env.XS_URL)"
+}
diff --git a/ms-form.nu b/ms-form.nu
new file mode 100644
index 0000000..5c646da
--- /dev/null
+++ b/ms-form.nu
@@ -0,0 +1,19 @@
+use std assert
+
+export def comp-authority [] {
+    [
+        { value: "http://10.54.150.152:5001", description: "internal ms ocr form recognizer" }
+    ]
+}
+
+export def main [file:path, --authority:string@comp-authority] {
+    let authority = if ($authority | is-empty) {
+        assert ("MS_OCR_AUTHORITY" in $env) "no authority specified. Use either the MS_OCR_AUTHORITY environment variable or specify --authority"
+        $env.MS_OCR_AUTHORITY
+    } else {
+        $authority
+    }
+
+    http post -H [content-type, application/octet-stream] $"($authority)/formrecognizer/documentModels/prebuilt-read:syncAnalyze?api-version=2022-08-31" (open $file)
+}
\ No newline at end of file
diff --git a/ms-ocr.nu b/ms-ocr.nu
new file mode 100644
index 0000000..84c02a5
--- /dev/null
+++ b/ms-ocr.nu
@@ -0,0 +1,58 @@
+use std assert
+
+def comp-authority [] {
+    [
+        { value: "http://10.54.150.152:5000", description: "internal ms ocr"},
+    ]
+}
+
+export def main [pdf:path, --authority:string@comp-authority] {
+    let authority = if ($authority | is-empty) {
+        assert ("MS_OCR_AUTHORITY" in $env) "no authority specified. Use either the MS_OCR_AUTHORITY environment variable or specify --authority"
+        $env.MS_OCR_AUTHORITY
+    } else {
+        $authority
+    }
+
+    http post -H [content-type, application/octet-stream] $"($authority)/vision/v3.2/read/syncAnalyze?language=de&readingOrder=natural" (open $pdf)
+}
+
+# list<path> => [{filename: str, ocr: table}]
+export def batch [] {
+    each {|image_path|
+        print $"ms ocr processing of (ansi gb)($image_path | path basename)(ansi reset)"
+        if ($image_path | path exists) {
+            let result = (main $image_path)
+            {filename: $image_path, ocr: $result}
+        }
+    }
+}
+
+# get the pages from ocr
+export def "slice pages" [...page_numbers:int] {
+    let ocr = $in
+    let slice = ($ocr | get analyzeResult.readResults | enumerate
+        | where {|x| $x.index in $page_numbers }
+        | get item
+        | wrap readResults
+    )
+    $ocr | update analyzeResult $slice
+}
+
+export def create-ocr-cache [directory:string=""] {
+    mkdir ocr_cache
+    glob $"([$directory, *{pdf,png,jpg,jpeg,tif,tiff}] | path join)" | batch | each {|x| $x.ocr | save --force $"ocr_cache/($x.filename | path basename)-ms-ocr.json" }
+}
+
+export def "to-words" [] {
+    get analyzeResult.readResults.lines | flatten | get words | flatten | get text
+}
+
+export def "to-lines" [] {
+    get analyzeResult.readResults.lines | flatten | get text
+}
+
+export def "to-text" [] {
+    to-lines | str join '. '
+}
\ No newline at end of file
diff --git a/net.nu b/net.nu
new file mode 100644
index 0000000..634a4b5
--- /dev/null
+++ b/net.nu
@@ -0,0 +1,2 @@
+
+export def services [host:string, service:string] {}
\ No newline at end of file
diff --git a/nxs/mod.nu b/nxs/mod.nu
new file mode 100644
index 0000000..0fde683
--- /dev/null
+++ b/nxs/mod.nu
@@ -0,0 +1,2 @@
+export module settings.nu
+export module traces.nu
\ No newline at end of file
diff --git a/nxs/regression.nu b/nxs/regression.nu
new file mode 100644
index 0000000..3d0931d
--- /dev/null
+++ b/nxs/regression.nu
@@ -0,0 +1,6 @@
+use remote.nu
+
+export def run [] {
+    print "test"
+    remote sync
+}
\ No newline at end of file
diff --git a/nxs/remote.nu b/nxs/remote.nu
new file mode 100644
index 0000000..80366b7
--- /dev/null
+++ b/nxs/remote.nu
@@ -0,0 +1,9 @@
+use settings.nu
+use settings.nu [comp-host-names, comp-project-names]
+
+# sync code to the remote machine
+export def "sync code" [host:string@comp-host-names, project:string@comp-project-names] {
+    # print $"sync with ($environment.build_host)"
+    # run-external "rsync" "-avPq" "--delete" "--filter=:- .gitignore" "--exclude" .git $"(project-root)/" $"($environment.build_host):(build-host-path)"
+}
+
diff --git a/nxs/settings.nu b/nxs/settings.nu
new file mode 100644
index 0000000..038129e
--- /dev/null
+++ b/nxs/settings.nu
@@ -0,0 +1,63 @@
+def settings-db [] {
+    $env.HOME | path join ".nxs.db"
+}
+
+def save [table_name:string] {
+    into sqlite (settings-db) --table_name $table_name
+}
+
+def query-settings [query:string] {
+    print $query
+    open (settings-db) | query db $query
+}
+
+export def add-host [host:string, working_dir:string] {
+    query-settings $"DELETE FROM hosts WHERE host is '($host)'"
+    [{host: $host, working_dir: $working_dir}] | save "hosts"
+}
+
+export def get-host [host:string] {
+    query-settings $"SELECT * FROM hosts WHERE host = '($host)'"
+}
+
+export def add-path [section:string, key:string, value:path] {
+    [{key: $key, value: $value}] | save $section
+}
+
+export def add [section:string, key:string, value?:string] {
+    [{key: $key, value: $value}] | save $section
+}
+
+export def set-path [section:string, key:string, value:path] {
+    del $section $key
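+    # delete-then-insert: keeps at most one row per key in the section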
+    [{key: $key, value: $value}] | save $section
+}
+
+export def set [section:string, key:string, value?:string] {
+    del $section $key
+    [{key: $key, value: $value}] | save $section
+}
+
+export def del [section:string, key:string] {
+    query-settings $"DELETE FROM ($section) WHERE key is '($key)'"
+}
+
+export def get [section:string@"comp-section-names"] {
+    query-settings $"SELECT * FROM ($section)"
+}
+
+export def "comp-section-names" [] {
+    open (settings-db) | columns
+}
+
+export def "comp-project-names" [] {
+    get "projects" | select key value | rename value description
+}
+
+export def "comp-host-names" [] {
+    open (settings-db) | get hosts
+}
+
+# print settings
+export def main [] {
+    open (settings-db)
+}
\ No newline at end of file
diff --git a/nxs/traces.nu b/nxs/traces.nu
new file mode 100644
index 0000000..a9e2b4f
--- /dev/null
+++ b/nxs/traces.nu
@@ -0,0 +1,13 @@
+# list the last `limit` traces, filtering out traces with less than 100 spans
+export def main [limit:int=1000] {
+    http get $"http://lyssa:9411/api/v2/traces?limit=($limit)"
+    | filter noise traces
+}
+
+export def "tags of" [span:string] {
+
+}
+
+def "filter noise traces" [] {
+    where {|t| ($t | length) > 100}
+}
diff --git a/progress.nu b/progress.nu
new file mode 100644
index 0000000..28068eb
--- /dev/null
+++ b/progress.nu
@@ -0,0 +1,33 @@
+export def wait-for [wait_time:duration] {
+    let steps = $wait_time / 1sec
+    seq 0 $steps | each {|i|
+        print -n -e $"(ansi erase_entire_line)\r($wait_time - ($i * 1sec))"
+        sleep 1sec
+    }
+    print ""
+}
+
+export def wait-until [call, --step-time:duration, --overall:duration] {
+    let input_stream = $in
+    let end_time = (date now) + $overall
+    loop {
+        if (do $call $input_stream) {
+            return true
+        }
+
+        if ($end_time < (date now)) {
+            return false
+        }
+
+        wait-for $step_time
+    }
+}
+
+export def "progressed-each" [call] {
+    let stream = $in
+    let overall = ($stream | length)
+    $stream | enumerate | each {|x|
+        print -n -e $"(ansi erase_entire_line)\r($x.index)/($overall)"
+        do $call $x.item
+    }
+}
\ No newline at end of file
diff --git a/properties.nu b/properties.nu
new file mode 100644
index 0000000..8ffe1e0
--- /dev/null
+++ b/properties.nu
@@ -0,0 +1,11 @@
+export def "from properties" [] {
+    lines
+    | parse '{key}={value}'
+}
+
+export def get-value [key:string] {
+    lines
+    | parse '{key}={value}'
+    | where key == $key
+    | get -i value
+    | to text
+}
diff --git a/regression.nu b/regression.nu
new file mode 100644
index 0000000..97bb841
--- /dev/null
+++ b/regression.nu
@@ -0,0 +1,46 @@
+use s3.nu
+
+export def latest-daily [] {
+    let latest = (s3 files xs-dev xs-reg-result/basic | sort-by lastModified | reverse | where key =~ "daily" | first)
+    [
+        "xs-dev",
+        ($latest.url | url parse | get path | str trim -c '/'),
+        $latest.key
+    ] | path join | s3 get-csv
+}
+
+export def latest-daily-detail [] {
+    let latest = (s3 files xs-dev xs-reg-result/detail | sort-by lastModified | reverse | where key =~ "daily" | first)
+    [
+        "xs-dev",
+        ($latest.url | url parse | get path | str trim -c '/'),
+        $latest.key
+    ] | path join | s3 get-csv
+}
+
+export def prop-fields [] {
+    get proposedFields.fields | flatten | flatten | select name type value
+}
+
+export def "detail worst" [type:string, --limit:int=3, --error-category=["FP", "FN"]] {
+    where Fieldtype =~ $type | group-by Filename | transpose filename data
+    | each {|f| $f | insert errors ($f.data | where Classification in $error_category | get Count | default 0) }
+    | sort-by errors | reverse | take $limit
+}
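+
+# e.g. (a sketch; "amount" is an illustrative field type):
+#   latest-daily-detail | detail worst amount --limit 3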
+
+export def delta [list1, list2] {
+    $list1 | zip $list2 | each {|r|
+        $r.0 - $r.1
+    }
+}
+
+# compares a report.csv with a baseline
+export def "compare daily" [] {
+    let report = $in
+    let f1_report = $report | get "F1 Score"
+    let f1_latest = (latest-daily | get "F1 Score")
+    let f1_delta = (delta $f1_report $f1_latest)
+    let f1_delta_row = ($f1_delta | wrap "F1 Score Delta")
+    $report | merge $f1_delta_row
+}
\ No newline at end of file
diff --git a/s3.nu b/s3.nu
new file mode 100644
index 0000000..bc97024
--- /dev/null
+++ b/s3.nu
@@ -0,0 +1,48 @@
+def mc-hosts [] {
+    open ~/.mc/config.json | get aliases | columns
+}
+
+export def buckets [host:string@mc-hosts] {
+    ^mc ls --json $host | from json --objects | get key
+}
+
+export def "del bucket" [host:string@mc-hosts, bucket:string] {
+    ^mc rb --force $"($host)/($bucket)"
+}
+
+export def "download bucket" [host:string@mc-hosts, bucket:string] {
+    ^mc cp --recursive ([$host, $bucket] | path join) .
+}
+
+# delete all files within a bucket (but not the bucket itself)
+export def "empty bucket" [host:string@mc-hosts, bucket:string] {
+    ^mc rm --recursive --force ([$host, $bucket] | path join)
+}
+
+export def "mk bucket" [host:string@mc-hosts, bucket:string] {
+    ^mc mb $"($host)/($bucket)"
+}
+
+export def files [host:string@mc-hosts, prefix:string=""] {
+    ^mc ls -r --json ([$host, $prefix] | path join) | from json --objects
+    | insert host $host
+    | insert prefix $prefix
+}
+
+export def folders [host:string@mc-hosts] {
+    ^mc ls --json $host | from json --objects
+}
+
+export def "delete files" [] {
+    let files = ($in | each {|x| [$x.host, $x.prefix, $x.key] | path join })
+    run-external --redirect-stdout mc rm $files
+}
+
+export def "cat files" [] {
+    let files = ($in | each {|x| [$x.host, $x.prefix, $x.key] | path join })
+    run-external --redirect-stdout mc cat $files
+}
+
+export def get-csv [] {
+    ^mc cat $in | from csv
+}
\ No newline at end of file
diff --git a/sc.nu b/sc.nu
new file mode 100644
index 0000000..42faddf
--- /dev/null
+++ b/sc.nu
@@ -0,0 +1,132 @@
+use ~/bin/nu_scripts/ht.nu
+use ~/bin/nu_scripts/jwt.nu
+use ~/bin/nu_scripts/credm.nu
+
+def token-name [] {
+    $"($env.XS_SF_ACCOUNT).token"
+}
+
+export def request-token [] {
+    credm get $env.XS_SF_ACCOUNT
+    | select login password
+    | rename username password
+    | to json
+    | ht post (ht with-path $env.XS_SF_URL "/1/rest/accounts/authentication/requesttoken")
+    | from json
+}
+
+def today [] {
+    date now | format date "%Y%m%d"
+}
+
+export def request-tenant-key [] {
+    let token = (request-token)
+    let tenant_id = ($token | get token | jwt parse | get payload.tenantId)
+    let url = (ht with-path $env.XS_SF_URL $"/1/rest/security/keyvault/key/($tenant_id)/(today)")
+    ht get $url --bearer $token.token
+}
+
+def sc-get [path:string] {
+    ^xh get -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
+}
+
+def sc-post [path:string] {
+    ^xh post -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
+}
+
+def sc-put [path:string] {
+    ^xh put -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
+}
+
+def sc-delete [path:string] {
+    ^xh delete -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $path) | from json
+}
+
+export def-env update-access-token [] {
+    print -e $"Login as (ansi pb)($env.XS_SF_ACCOUNT)(ansi reset)"
+    let token_response = (request-token)
+    if "ErrorCode" in $token_response {
+        print "ERROR"
+        let error_msg = $"Could not login into smart cloud as user ($env.XS_SF_ACCOUNT): ($token_response | get Message)"
+        error make {
+            msg: $error_msg
+        }
+    }
+    $token_response | credm update-token-response (token-name)
+    $token_response.token
+}
+
+export def provide-access-token [] {
+    let token = (credm get (token-name))
+    if "password" not-in $token {
+        update-access-token
+    } else {
+        $token.password
+    }
+}
+
+export def list-subsystems [] {
+    sc-get "/1/rest/subsystems/management/" | get subsystemInfos
+}
+
+export def upload-sub [name:string@xs-subsystem-names, sub_path:string] {
+    ^xh put --form -A bearer -a $"(provide-access-token)" (ht with-path $env.XS_SF_URL $"/1/rest/subsystems/management/($name)") 'subsystem-meta-data={ "description" : "A great description" }' $"subsystem-data@($sub_path)"
+    | from json
+}
+
+export def xs-subsystem-names [] {
+    [
+        "XS-Service-Test",
+        "XS-Service-Prod",
+    ]
+}
+
+def xs-matching-db-table-names [] {
+    [
+        "XS_SETTINGS"
+    ]
+}
+
+export def start-transaction [
+    subsystem:string@xs-subsystem-names,
+    table:string@xs-matching-db-table-names,
+    columns:list
+    --clear
+] {
+    {
+        subsystem: $subsystem,
+        tableMetaData: [{
+            tableName: $table,
+            clear: $clear,
+            columns: $columns
+        }]
+    } | to json
+    | sc-post /1/rest/subsystems/transactions/
+}
+
+export def tenant-settings [] {
+    let creds = (credm get $env.XS_SF_ACCOUNT)
+    {
+        "XS_BACKEND_URL": (ht with-path $env.XS_URL "/"),
+        "SF_URL": (ht with-path $env.XS_SF_URL "/"),
+        "TENANT_USER": $creds.login,
+        "TENANT_PASSWORD": $creds.password
+    }
+}
+
+export def commit-transaction [transaction_id:string] {
+    sc-delete $"/1/rest/subsystems/transactions/($transaction_id)/commit"
+}
+
+export def settings-db-row-count [name:string@xs-subsystem-names] {
+    sc-get $"/1/rest/subsystems/masterdata/count/($name)/XS_SETTINGS/Active"
+}
+
+export def update-tenant-settings [name:string@xs-subsystem-names] {
+    print -e $"Update settings for tenant ($env.XS_SF_ACCOUNT) and subsystem ($name)"
+    let transaction_id = (start-transaction $name XS_SETTINGS ["SETTINGS_KEY", "SETTINGS_VALUE"] --clear | get transactionId)
+    print -e $"transaction id: (ansi pb)($transaction_id)(ansi reset)"
+    tenant-settings | transpose | rename key value | each {|x| [$x.key, $x.value] } | to json
+    | sc-put $"/1/rest/subsystems/transactions/($transaction_id)/XS_SETTINGS"
+    commit-transaction $transaction_id
+}
\ No newline at end of file
diff --git a/user-se-admin-aws-test.nu b/user-se-admin-aws-test.nu
new file mode 100644
index 0000000..9bc75b3
--- /dev/null
+++ b/user-se-admin-aws-test.nu
@@ -0,0 +1,7 @@
+use ~/bin/nu_scripts/aws-test-env.nu
+export-env {
+    load-env {
+        XS_ENV_NAME: "AWS_TEST(SE-ADMIN)"
+        XS_SF_ACCOUNT: "insiders-admin@se.insiders.cloud",
+    }
+}
\ No newline at end of file
diff --git a/user-se-api-aws-test.nu b/user-se-api-aws-test.nu
new file mode 100644
index 0000000..8260bda
--- /dev/null
+++ b/user-se-api-aws-test.nu
@@ -0,0 +1,7 @@
+use ~/bin/nu_scripts/aws-test-env.nu
+export-env {
+    load-env {
+        XS_ENV_NAME: "AWS_TEST(SE)"
+        XS_SF_ACCOUNT: "xs.aws.test.se.insiders-api",
+    }
+}
\ No newline at end of file
diff --git a/xs-deploy.nu b/xs-deploy.nu
new file mode 100644
index 0000000..727d8f8
--- /dev/null
+++ b/xs-deploy.nu
@@ -0,0 +1,163 @@
+use ~/bin/nu_scripts/bm.nu
+use ~/bin/nu_scripts/hl.nu
+use ~/bin/nu_scripts/kube.nu
+use std log
+
+# == Environment ==
+def project-root [] { bm find XtractionStudioBackend }
+def helm-project-name [] { "xs-backend" }
+def build-host-path [] { "workingdirs/xtractionstudio" }
+
+export def component-definitions [] {
+    [
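+        # app = gradle subproject, deployment = k8s deployment name,
+        # target = optional gradle push task (defaults to apps:<app>:dockerPushImage)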
+        {app: demo, deployment: xs-demo, target: "pushDemoImage"},
+        {app: logging-proxy, deployment: xs-logging-proxy, target: "pushLoggingProxyImage"},
+        {app: backend, deployment: xs-backend},
+        {app: ducklingClient, deployment: xs-duckling},
+        {app: flairClient, deployment: xs-flair},
+        {app: imageRenderer, deployment: xs-image-renderer},
+        {app: languageExtractor, deployment: xs-language-extractor},
+        {app: ocrService, deployment: xs-ocr-service},
+        {app: addressExtractor, deployment: xs-address-extractor},
+        {app: steward, deployment: xs-steward},
+        {app: statisticsService, deployment: xs-statistics-creator},
+        {app: bankExtractor, deployment: xs-bank-extractor},
+        {app: scriptExtractor, deployment: xs-script-extractor},
+    ]
+}
+
+def comp-app-name [] {
+    apps
+}
+
+def apps [] {
+    component-definitions | get app
+}
+
+def find-app-target [app_name:string] {
+    component-definitions | where app == $app_name | first | get -i target | default $"apps:($app_name):dockerPushImage"
+}
+
+def environments [] {
+    [
+        {
+            name: "dev",
+            docker_repo: "xtractionstudio-docker.insiders-technologies.de",
+            helm_target: "Development",
+            needs_aws_login: false,
+            build_host: "lyssa",
+        },
+        {
+            name: "aws-test",
+            docker_repo: "047349208615.dkr.ecr.eu-central-1.amazonaws.com",
+            helm_target: "Awstest",
+            needs_aws_login: true,
+            build_host: "gitlab-build-host",
+        },
+        {
+            name: "aws-prod",
+            docker_repo: "047349208615.dkr.ecr.eu-central-1.amazonaws.com",
+            helm_target: "Awsprod",
+            needs_aws_login: true,
+            build_host: "gitlab-build-host",
+        },
+    ]
+}
+
+def find-environment [] {
+    let name = $in
+    environments | where name == $name | first
+}
+
+# == Completions ==
+def comp-environment-name [] {
+    environments | get name
+}
+
+# == Externals ==
+def call_gradle [args] {
+    let gradle_wrapper = ([(project-root), "gradlew"] | path join)
+    log debug $"use gradle_wrapper location: ($gradle_wrapper)"
+    run-external $gradle_wrapper $args
+}
+
+def sync [environment] {
+    print $"sync with ($environment.build_host)"
+    run-external "rsync" "-avP" "--delete" "--filter=:- .gitignore" "--exclude" .git $"(project-root)/" $"($environment.build_host):(build-host-path)"
+}
+
+def remote_aws_login [$environment] {
+    ssh ($environment.build_host) -t $"aws ecr get-login-password --region eu-central-1 | docker login --username AWS --password-stdin ($environment.docker_repo)"
+}
+
+def remote_gradle [environment, args:list] {
+    print $"execute gradle with args: ($args)"
+    ssh $environment.build_host -t $"cd (build-host-path) ; EXCLUDE_PUSH_IMAGE=yes ./gradlew ($args | str join ' ')"
+}
+
+def build-on-host [environment, apps:list] {
+    # let gradle_targets = ($apps | each {|app| find-app-target $app })
+    execute-gradle-task-on-env $environment "pushAllToDev"
+}
+
+def execute-gradle-task-on-env [environment, task:string] {
+    let gradle_args = (["--project-prop", $"docker.repo=($environment.docker_repo)", "--parallel"] | append $task)
+    if ($environment.needs_aws_login) {
+        remote_aws_login $environment
+    }
+    remote_gradle $environment $gradle_args
+}
+
+def helm-deploy [environment] {
+    print "Install helm app"
+    remote_gradle $environment [$"helmInstallXsBackendTo($environment.helm_target)"]
+}
+
+# build and push the images without changing the k8s chart
+export def "push images" [environment_name:string@comp-environment-name] {
+    let environment = ($environment_name | find-environment)
+    cd (project-root)
+    build-on-host $environment (apps)
+}
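+
+# e.g. (a sketch): rebuild and push all images to the dev registry
+#   push images dev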
+
+# install the complete helm app to the chosen environment, building all containers
+export def "full" [environment_name:string@comp-environment-name] {
+    let environment = ($environment_name | find-environment)
+    cd (project-root)
+    sync $environment
+    build-on-host $environment (apps)
+    helm-deploy $environment
+}
+
+# execute a single gradle task on remote machine
+export def "task" [environment_name:string@comp-environment-name, task:string] {
+    let environment = ($environment_name | find-environment)
+    cd (project-root)
+    sync $environment
+    execute-gradle-task-on-env $environment $task
+}
+
+# only install the current helm chart (without building and pushing the images)
+export def "helm app" [environment_name:string@comp-environment-name] {
+    let environment = ($environment_name | find-environment)
+    cd (project-root)
+    sync $environment
+    helm-deploy $environment
+}
+
+# update single deployments, i.e. pushing the docker image and restarting the deployment
+export def "update app" [environment_name:string@comp-environment-name, app_name:string@comp-app-name] {
+    let environment = ($environment_name | find-environment)
+    cd (project-root)
+    sync $environment
+    build-on-host $environment [$app_name]
+    let component = (component-definitions | where app == $app_name | first)
+    print $"restart k8s deployment ($component.deployment)"
+    kube deployment restart $component.deployment
+}
+
+export def "sync code" [environment_name:string@comp-environment-name] {
+    let environment = ($environment_name | find-environment)
+    cd (project-root)
+    sync $environment
+}
diff --git a/xs.nu b/xs.nu
new file mode 100644
index 0000000..5af8a55
--- /dev/null
+++ b/xs.nu
@@ -0,0 +1,176 @@
+use ~/bin/nu_scripts/ht.nu
+use ~/bin/nu_scripts/credm.nu
+use ~/bin/nu_scripts/sc.nu
+
+export def actuators [] {
+    http get (ht with-path $env.XS_MANAGEMENT_URL "/actuator") | from json | get _links
+}
+
+export def "script source" [] {
+    let body = $in
+    $body | ht put (ht with-path "http://localhost:8230" "/source") --bearer (sc provide-access-token)
+}
+
+def actuators-names [] {
+    actuators | columns
+}
+
+def metrics-names [] {
+    actuator metrics | get names
+}
+
+export def comp-service-ids [] {
+    list-service | select externalId name | rename value description
+}
+
+export def "delete service" [service_id:string@comp-service-ids] {
+    {id: $service_id} | to csv | ^xs delete-service
+}
+
+export def actuator [name:string@actuators-names] {
+    http get (actuators | get $name | get href) | from json
+}
+
+export def metrics [name:string@metrics-names] {
+    http get (ht with-path $env.XS_MANAGEMENT_URL $"/actuator/metrics/($name)") | from json
+}
+
+export def batch-create-service [pattern?:string, --clean] {
+    $in | to csv | ^xs batch-create-service | from csv
+}
+
+export def delete-all-service [] {
+    ^xs delete-all-service
+}
+
+export def "get proposed fields" [] {
+    ^xs proposed-fields | from json
+}
+
+# download the analyse results to file
+export def "get analyse result" [
+    analyse_task_id:string@comp-analyse-task-id
+] {
+    collect-task-ids $analyse_task_id | to csv | ^xs download-analyse-result
+}
+
+# download the images from analyse result
+export def "get analyse images" [analyse_task_id?:string@comp-analyse-task-id] {
+    collect-task-ids $analyse_task_id | to csv | ^xs download-analyse-images
+}
+
+def comp-analyse-task-id [] {
+    list-analyse-tasks | get id
+}
+
+def collect-task-ids [task_id?:string] {
+    let tasks = $in
+    if ($task_id != null) {
+        [{id: $task_id}]
+    } else {
+        $tasks | select id
+    }
+}
+
+export def "group proposed fields" [] {
+    get fields
+    | flatten
+    | flatten
+    | select name type value
+    | group-by name
+    | values
+    | each {|row| {
+        name: ($row | first | get name),
+        type: ($row | first | get type),
+        value: ($row | get value | str join ' ')
+    } }
+    | sort-by name
+}
+
+export def delete-analyse-tasks [] {
+    to csv | ^xs delete-analyse-tasks
+}
+
+export def delete-all-analyse-tasks [] {
+    list-analyse-tasks | delete-analyse-tasks
+}
+
+# triggers a re-analyse of a given task
+export def restart [task_id?:string@comp-analyse-task-id] {
+    collect-task-ids $task_id | to csv | ^xs restart
+}
+
+def find-images [] {
+    fd -Ie pdf -e jpg -e tif -e tiff -e jpeg | lines
+}
+
+# creates a new analyse task for the provided images
+export def analyse [
+    ...patterns:string@find-images, # a list of paths or glob patterns for your images (jpg, pdf, tiff)
+    --squash # if set, create a single analyse task for all images
+] {
+    let paths = ($patterns
+        | each {|p| glob $p }
+        | reduce -f [] {|it, acc| $acc | append $it}
+        | each {|p| {path: $p}}
+    )
+    if $squash {
+        $paths | to csv | ^xs multi-part-analyse | from csv | insert paths ($paths | get path)
+    } else {
+        $paths | batch-analyse
+    }
+}
+
+export def service [path:string@find-images] {
+    {path: $path} | batch-create-service
+}
+
+export def create-services [pattern:string="*{pdf,jpg,jpeg,tif,tiff,png}", --num:int, --filter(-f):string, --dry-run] {
+    let input = $in
+    let files = (if ($input | is-empty) {
+        glob $pattern
+    } else {
+        $input
+    })
+    let filtered = (if $filter != null {
+        $files | find $filter
+    } else {
+        $files
+    })
+    let final = (if ($num != null) {
+        $filtered | shuffle | take $num
+    } else {
+        $filtered
+    })
+
+    if $dry_run {
+        return $final
+    }
+
+    $final | each {|f| {path: $f, name: ($f | path parse | get stem)} } | batch-create-service
+}
+
+export def batch-analyse [--concurrency_limit(-c):int=16, --rate_limit(-r):string="100/1sec"] {
+    to csv | ^xs -c $concurrency_limit -r $rate_limit batch-analyse | from csv
+}
+
+def "parse-date" [] {
+    let parsed = ($in | parse "{time}[{zone}]" | first)
+    if $parsed.zone == "UTC" {
+        return ($parsed.time | into datetime)
+    } else {
+        print -e $"WARN: unknown time zone ($parsed.zone)"
+        return ($parsed.time | into datetime)
+    }
+}
+
+export def "list-service" [] {
+    ^xs list-service | from json
+}
+
+export alias la = list-analyse-tasks
+export alias da = delete-analyse-tasks
+export alias a = analyse
+
+export def "list-analyse-tasks" [] {
+    ^xs list-analyse-tasks | from csv
+    | reject "tenant"
+    | update createdAt {|row| $row.createdAt | into datetime }
+    | update modifiedAt {|row| $row.modifiedAt | into datetime }
+    | insert duration_secs {|row| ($row.modifiedAt - $row.createdAt) / 1sec }
+}
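+
+# e.g. (a sketch): analyse every pdf in the current directory as a single task,
+# then watch the task list until it finishes
+#   analyse *.pdf --squash
+#   list-analyse-tasks | sort-by createdAt | last
\ No newline at end of file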