Embed scripts for reward system #1888

Merged
merged 8 commits on Oct 23, 2024
Changes from all commits
51 changes: 51 additions & 0 deletions cmd/skywire-cli/commands/rewards/getlogs.sh
@@ -0,0 +1,51 @@
#!/usr/bin/bash
timeout 30.0m unbuffer skywire-cli log --minv v1.3.21 -s $(tail -n1 survey-wl.conf) | tee skywire-cli-log.txt
#echo -e "skywire survey and transport log collection $(date)\n\n$(cat skywire-cli-log.txt)\n"
echo -e "skywire survey and transport log collection $(date)\n\n$(cat skywire-cli-log.txt)\n" | tee skywire-cli-log0.txt >> /dev/null
echo "finished "$(date) | tee -a skywire-cli-log0.txt
mv skywire-cli-log0.txt skywire-cli-log.txt

#Delete json files more than 1 week old
find log_backups/*/*.json -type f -mmin +$((168 * 60)) -delete
find log_collecting/*/*.json -type f -mmin +$((168 * 60)) -delete
#remove empty files and dirs
find log_collecting/*/ -empty -type f -delete && printf "removed empty files... \n" || true
find log_collecting/*/ -type f -size 19c -delete && printf "removed files with http 404 errors... \n" || true
find log_collecting/*/ -type f -size 18c -delete && printf "removed files with http 404 errors... \n" || true
find log_collecting/* -empty -type d -delete && printf "removed empty dirs... \n" || true
find log_backups/*/ -empty -type f -delete && printf "removed empty files... \n" || true
find log_backups/*/ -type f -size 19c -delete && printf "removed files with http 404 errors... \n" || true
find log_backups/* -empty -type d -delete && printf "removed empty dirs... \n" || true
#for ((i=1; i<=($(date -d "$(date +%m)/$(date +%d)/$(date +%Y)" +%j)); i++)); do find log_collecting/*/ -type f -name $(date -d "01/01/2023 +$((i-1)) days" +'%Y-%m-%d' | awk '{print $0}').csv | while read _file ; do [[ $(head -n 1 $_file) == *"tp_id,recv,sent,time_stamp"* ]] && sed -i '1d' $_file ; done ; done
#for ((i=1; i<=($(date -d "$(date +%m)/$(date +%d)/$(date +%Y)" +%j)); i++)); do find log_backups/*/ -type f -name $(date -d "01/01/2023 +$((i-1)) days" +'%Y-%m-%d' | awk '{print $0}').csv | while read _file ; do [[ $(head -n 1 $_file) == *"tp_id,recv,sent,time_stamp"* ]] && sed -i '1d' $_file ; done ; done
#for ((i=1; i<=($(date -d "$(date +%m)/$(date +%d)/$(date +%Y)" +%j)); i++)); do find log_collecting/*/ -type f -name $(date -d "01/01/2023 +$((i-1)) days" +'%Y-%m-%d' | awk '{print $0}').csv -print | xargs grep -l "404 page not found" | parallel rm
#for ((i=1; i<=($(date -d "$(date +%m)/$(date +%d)/$(date +%Y)" +%j)); i++)); do find log_backups/*/ -type f -name $(date -d "01/01/2023 +$((i-1)) days" +'%Y-%m-%d' | awk '{print $0}').csv | xargs grep -l "404 page not found" | parallel rm

find log_collecting/*/$(date +'%Y-%m-%d').csv -type f -print | while read _file ; do [[ $(head -n 1 $_file) == *"tp_id,recv,sent,time_stamp"* ]] && sed -i '1d' $_file ; done || true
find log_collecting/*/$(date --date="yesterday" +'%Y-%m-%d').csv -type f -print | while read _file ; do [[ $(head -n 1 $_file) == *"tp_id,recv,sent,time_stamp"* ]] && sed -i '1d' $_file ; done || true
find log_backups/*/$(date +'%Y-%m-%d').csv -type f -print | while read _file ; do [[ $(head -n 1 $_file) == *"tp_id,recv,sent,time_stamp"* ]] && sed -i '1d' $_file ; done || true
find log_backups/*/$(date --date="yesterday" +'%Y-%m-%d').csv -type f -print | while read _file ; do [[ $(head -n 1 $_file) == *"tp_id,recv,sent,time_stamp"* ]] && sed -i '1d' $_file ; done || true
find log_collecting/*/*.json -type f -print | while read _file; do if ! jq '.' "$_file" >/dev/null 2>&1; then echo "invalid json $_file" ; rm $_file; fi; done
find log_backups/*/*.json -type f -print | while read _file; do if ! jq '.' "$_file" >/dev/null 2>&1; then echo "invalid json $_file" ; rm $_file; fi; done

printf "checking tp logs... \n"
[[ -f log_collecting/*/$(date +'%Y-%m-%d').csv ]] && find log_collecting/*/$(date +'%Y-%m-%d').csv -type f -print | xargs grep -l "404 page not found" | parallel rm || true
[[ -f log_collecting/*/$(date --date="yesterday" +'%Y-%m-%d').csv ]] && find log_collecting/*/$(date --date="yesterday" +'%Y-%m-%d').csv -type f -print | xargs grep -l "404 page not found" | parallel rm || true
[[ -f log_collecting/*/$(date +'%Y-%m-%d').csv ]] && find log_collecting/*/$(date +'%Y-%m-%d').csv -type f -print | xargs grep -l "Not Found" | parallel rm || true
[[ -f log_collecting/*/$(date --date="yesterday" +'%Y-%m-%d').csv ]] && find log_collecting/*/$(date --date="yesterday" +'%Y-%m-%d').csv -type f -print | xargs grep -l "Not Found" | parallel rm || true
[[ -f log_backups/*/$(date +'%Y-%m-%d').csv ]] && find log_backups/*/$(date +'%Y-%m-%d').csv -type f -print | xargs grep -l "404 page not found" | parallel rm || true
[[ -f log_backups/*/$(date --date="yesterday" +'%Y-%m-%d').csv ]] && find log_backups/*/$(date --date="yesterday" +'%Y-%m-%d').csv -type f -print | xargs grep -l "404 page not found" | parallel rm || true
[[ -f log_backups/*/$(date +'%Y-%m-%d').csv ]] && find log_backups/*/$(date +'%Y-%m-%d').csv -type f -print | xargs grep -l "Not Found" | parallel rm || true
[[ -f log_backups/*/$(date --date="yesterday" +'%Y-%m-%d').csv ]] && find log_backups/*/$(date --date="yesterday" +'%Y-%m-%d').csv -type f -print | xargs grep -l "Not Found" | parallel rm || true


#back up the collected files
rsync -r log_collecting/ log_backups || true
[[ -f log_backups/*/*~ ]] && rm log_backups/*/*~ || true

#update the addresses in the csv
#[[ -f ip-sky-pk-new.csv ]] && rm ip-sky-pk-new.csv
#find log_backups/*/node-info.json -type f | parallel "./newsky.sh {}"
#cat ip-sky-pk.csv | parallel "./updsky.sh {}"
#mv ip-sky-pk-new.csv ip-sky-pk.csv
#cat skywire-cli-log.txt
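
A quick sanity check of the cleanup retention window used above (an illustrative calculation, not part of the script):

    echo $((168 * 60))   # 168 hours * 60 = 10080 minutes, the 7-day threshold passed to find -mmin
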
45 changes: 45 additions & 0 deletions cmd/skywire-cli/commands/rewards/reward.sh
@@ -0,0 +1,45 @@
#!/bin/bash
########## Skywire reward processing and calculation script reward.sh ##########
# Author: Moses Narrow
################################################################################
## Files:
#date_ineligible.csv account of non-rewarded visors
#date_rewardtxn0.csv reward transaction CSV
#date_shares.csv reward shares CSV
#date_stats.txt statistical data
#date_ut.json backup of uptime tracker data (7 days of UT data)
#date_ut.txt $ skywire cli ut > date_ut.txt
#date.txt transaction ID of reward distribution transaction - indicates rewards sent if exists
################################################################################
# Prevent running this script when rewards have already been distributed
[[ -f hist/$(date --date="yesterday" +%Y-%m-%d).txt ]] && echo "Transaction already broadcasted for yesterday" && exit 0
# Determine the date for which to calculate rewards
# based on the last file containing the reward transaction that exists
# (i.e. 2023-05-01.txt)
###uncomment the below line to do historic calculations
#[[ -z $_wdate ]] && _wdate="$(date -d "$(find hist/????-??-??.txt | tail -n1 | cut -d '/' -f2 | cut -d '.' -f1) +1 day" "+%Y-%m-%d")"
###comment the below line to do historic calculations
[[ ! -f hist/$(date --date="yesterday" +%Y-%m-%d).txt ]] && _wdate=$(date --date="yesterday" +%Y-%m-%d)
## OR specify a date like yesterday ##
#_wdate=$(date --date="yesterday" +%Y-%m-%d) ./reward.sh

####################################################
skywire cli ut --cfu "hist/${_wdate}_ut.json" | tee "hist/${_wdate}_ut.txt"
# New reward pool starts November 2nd, 2024
if [[ $(date +%s) -lt $(date -d "2024-11-02" +%s) ]]; then
#echo "The date is before November 2nd, 2024."
#v1.3.29 - two reward pools - exclude pool 2
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -x "" -ed ${_wdate} -p log_backups | tee hist/${_wdate}_ineligible.csv
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -x "" -20d ${_wdate} -p log_backups | tee hist/${_wdate}_shares.csv
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -x "" -10d $(date --date="yesterday" +%Y-%m-%d) -p log_backups | tee hist/${_wdate}_rewardtxn0.csv
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -x "" -12d ${_wdate} -p log_backups | tee hist/${_wdate}_stats.txt
else
#echo "The date is on or after November 2nd, 2024."
#v1.3.29 - two reward pools - include pool 2
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -ed ${_wdate} -p log_backups | tee hist/${_wdate}_ineligible.csv
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -20d ${_wdate} -p log_backups | tee hist/${_wdate}_shares.csv
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -10d $(date --date="yesterday" +%Y-%m-%d) -p log_backups | tee hist/${_wdate}_rewardtxn0.csv
skywire cli rewards --utfile "hist/${_wdate}_ut.json" -12d ${_wdate} -p log_backups | tee hist/${_wdate}_stats.txt
fi
#return
exit 0
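
For orientation, a sketch of what hist/ might contain after a successful run, following the file list in the script header above (the date shown is illustrative):

    $ ls hist/
    2024-11-02_ineligible.csv   # non-rewarded visors
    2024-11-02_rewardtxn0.csv   # reward transaction CSV
    2024-11-02_shares.csv       # reward shares CSV
    2024-11-02_stats.txt        # statistical data
    2024-11-02_ut.json          # uptime tracker backup
    2024-11-02_ut.txt           # output of skywire cli ut
    2024-11-02.txt              # transaction ID; its presence means rewards were already sent
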
54 changes: 53 additions & 1 deletion cmd/skywire-cli/commands/rewards/services.go
@@ -3,6 +3,7 @@

import (
"bytes"
_ "embed"
"fmt"
"os"
"os/user"
@@ -13,6 +14,57 @@
"github.com/spf13/cobra"
)

//go:embed reward.sh
var rewardSH []byte

//go:embed getlogs.sh
var getlogsSH []byte

const testSH = `#!/bin/bash
echo "Hello World"`

var (
getlogssh bool
rewardsh bool
testsh bool
)

func init() {
RootCmd.AddCommand(
scriptCmd,
)
scriptCmd.Flags().BoolVarP(&getlogssh, "getlogs", "g", false, "print getlogs.sh")
scriptCmd.Flags().BoolVarP(&rewardsh, "reward", "r", false, "print reward.sh")
scriptCmd.Flags().BoolVarP(&testsh, "test", "t", false, "print test.sh")
scriptCmd.Flags().MarkHidden("test") //nolint

}

var scriptCmd = &cobra.Command{
Use: "script",
Short: "print reward system scripts",
Long: `Print the reward system scripts. Pipe to bash to execute.
$ skywire cli rewards script -t | bash
Hello World`,
Run: func(_ *cobra.Command, _ []string) {
if getlogssh && rewardsh {
log.Fatal("mutually exclusive flags")
}
if getlogssh {
fmt.Println(string(getlogsSH))
os.Exit(0)
}
if rewardsh {
fmt.Println(string(rewardSH))
os.Exit(0)
}
if testsh {
fmt.Println(string(testSH))
os.Exit(0)
}
},
}

var (
userName string
workingDir string
@@ -34,7 +86,7 @@
if err != nil {
log.Fatal(err)
}
stat := fileInfo.Sys().(*syscall.Stat_t)

Check failure on line 89 in cmd/skywire-cli/commands/rewards/services.go (GitHub Actions / windows): undefined: syscall.Stat_t
owner, err := user.LookupId(fmt.Sprint(stat.Uid))
if err != nil {
log.Fatal(err)
@@ -148,7 +200,7 @@
Type=simple
User={{.User}}
WorkingDirectory={{.Dir}}/rewards
ExecStart=/bin/bash -c './getlogs.sh && ./reward.sh ; exit 0'
ExecStart=/bin/bash -c 'skywire cli rewards script -g | bash && skywire cli rewards script -r | bash ; exit 0'

[Install]
WantedBy=multi-user.target
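
A usage sketch of the new script subcommand added above; the updated ExecStart pipes the printed scripts to bash in the same way:

    # print the embedded scripts
    skywire cli rewards script -g   # getlogs.sh
    skywire cli rewards script -r   # reward.sh
    # execute them, mirroring the systemd unit's ExecStart
    /bin/bash -c 'skywire cli rewards script -g | bash && skywire cli rewards script -r | bash ; exit 0'
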
2 changes: 1 addition & 1 deletion cmd/skywire-cli/commands/rewards/ui.go
@@ -847,7 +847,7 @@ func server()
l += "\n\nIneligible:\n"
for _, line := range l2 {
thispk, _ := script.Echo(line).Column(2).String() //nolint
reason, _ := script.Echo(line).Column(3).String() //nolint
reason, _ := script.Echo(line).Column(3).String() //nolint
invalid, _ := script.Echo(line).Match(", , , ,").String() //nolint
if invalid != "" {
_, err = script.IfExists("rewards/log_backups/" + thispk + "/node-info.json").Echo("").String()