use ~/bin/nu_scripts/ht.nu
use ~/bin/nu_scripts/credm.nu
use ~/bin/nu_scripts/sc.nu

# list the actuator endpoints exposed under $env.XS_MANAGEMENT_URL
export def actuators [] {
    http get (ht with-path $env.XS_MANAGEMENT_URL "/actuator") | from json | get _links
}

# upload the piped-in script body to the local /source endpoint
export def "script source" [] {
    let body = $in
    $body | ht put (ht with-path "http://localhost:8230" "/source") --bearer (sc provide-access-token)
}

def actuators-names [] {
    actuators | columns
}

def metrics-names [] {
    actuator metrics | get names
}

# completer: service ids with their names as description
export def comp-service-ids [] {
    ^xs list-service | from json | select externalId name | rename value description
}

export def "delete service" [service_id:string@comp-service-ids] {
    {id: $service_id} | to csv | ^xs delete-service
}

export def actuator [name:string@actuators-names] {
    http get (actuators | get $name | get href) | from json
}

export def metrics [name:string@metrics-names] {
    http get (ht with-path $env.XS_MANAGEMENT_URL $"/actuator/metrics/($name)") | from json
}

export def batch-create-service [pattern?:string, --clean] {
    $in | to csv | ^xs batch-create-service | from csv
}

export def delete-all-service [] {
    ^xs delete-all-service
}

export def "get proposed fields" [] {
    ^xs proposed-fields | from json
}

# download the analyse results to file
export def "get analyse result" [analyse_task_id:string@comp-analyse-task-id] {
    collect-task-ids $analyse_task_id | to csv | ^xs download-analyse-result
}

# download the images from the analyse result
export def "get analyse images" [analyse_task_id?:string@comp-analyse-task-id] {
    collect-task-ids $analyse_task_id | to csv | ^xs download-analyse-images
}

def comp-analyse-task-id [] {
    list-analyse-tasks | get id
}

# use the given task id if present, otherwise the ids of the tasks piped in
def collect-task-ids [task_id?:string] {
    let tasks = $in
    if ($task_id != null) {
        [{id: $task_id}]
    } else {
        $tasks | select id
    }
}

# group a table with a `fields` column by field name, joining the values
export def "group proposed fields" [] {
    get fields
    | flatten
    | flatten
    | select name type value
    | group-by name
    | values
    | each {|row|
        {
            name: ($row | first | get name)
            type: ($row | first | get type)
            value: ($row | get value | str join ' ')
        }
    }
    | sort-by name
}

# delete the analyse tasks piped in (expects rows with an id column)
export def delete-analyse-tasks [] {
    to csv | ^xs delete-analyse-tasks
}

export def delete-all-analyse-tasks [] {
    list-analyse-tasks | delete-analyse-tasks
}

# triggers a re-analyse of a given task
export def restart [task_id?:string@comp-analyse-task-id] {
    collect-task-ids $task_id | to csv | ^xs restart
}

def find-images [] {
    fd -Ie pdf -e jpg -e tif -e tiff -e jpeg | lines
}

# creates a new analyse task for the provided images
export def analyse [
    ...patterns:string@find-images  # a list of paths or glob patterns for your images (jpg, pdf, tiff)
    --squash                        # if set, create a single analyse task for all images
] {
    let paths = (
        $patterns
        | each {|p| glob $p }
        | reduce -f [] {|it, acc| $acc | append $it }
        | each {|p| {path: $p} }
    )
    if $squash {
        $paths | to csv | ^xs multi-part-analyse | from csv | insert paths ($paths | get path)
    } else {
        $paths | batch-analyse
    }
}

export def service [path:string@find-images] {
    {path: $path} | batch-create-service
}

export def create-services [
    pattern:string = "*{pdf,jpg,jpeg,tif,tiff,png}"
    --num:int               # create services for at most this many (randomly chosen) files
    --filter (-f): string   # only keep files matching this string
    --dry-run               # only show which files would be used
] {
    let input = $in
    let files = (if ($input | is-empty) { glob $pattern } else { $input })
    let filtered = (if $filter != null { $files | find $filter } else { $files })
    let final = (if ($num != null) { $filtered | shuffle | take $num } else { $filtered })
    if $dry_run {
        return $final
    }
    $final
    | each {|f| {path: $f, name: ($f | path parse | get stem)} }
    | batch-create-service
}

export def batch-analyse [
    --concurrency_limit (-c): int = 16
    --rate_limit (-r): string = "100/1sec"
] {
    to csv | ^xs -c $concurrency_limit -r $rate_limit batch-analyse | from csv
}

# parse a "<timestamp>[<zone>]" string into a datetime
def "parse-date" [] {
    let parsed = ($in | parse "{time}[{zone}]" | first)
    if $parsed.zone == "UTC" {
        return ($parsed.time | into datetime)
    } else {
        print -e $"WARN: unknown time zone ($parsed.zone)"
        return ($parsed.time | into datetime)
    }
}

export def "list-service" [] {
    ^xs list-service | from json
}

export alias la = list-analyse-tasks
export alias da = delete-analyse-tasks
export alias a = analyse

export def "list-analyse-tasks" [] {
    ^xs list-analyse-tasks
    | from csv
    | reject "tenant"
    | update createdAt {|row| $row.createdAt | into datetime }
    | update modifiedAt {|row| $row.modifiedAt | into datetime }
    | insert duration_secs {|row| ($row.modifiedAt - $row.createdAt) / 1sec }
}
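
# Example usage (a sketch, not part of the module's API). It assumes this file is
# saved e.g. as xs.nu, that the external `xs` CLI is on PATH, and that
# $env.XS_MANAGEMENT_URL points at the service's actuator base URL; the metric
# name below is only illustrative.
#
#     use ~/bin/nu_scripts/xs.nu *
#
#     # one analyse task per matching image
#     analyse "*.pdf" "scans/*.jpg"
#
#     # a single squashed task for a whole directory of scans
#     analyse --squash "scans/*.tif"
#
#     # preview, then create services for up to 10 random matching files
#     create-services --num 10 --dry-run
#     create-services --num 10
#
#     # inspect an actuator metric by name
#     metrics jvm.memory.used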