tx · EagziTUuatBGN4yrywpoprrknF46urgJ1q46iavxr1Ng

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 12:50 [3082549] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves
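The summary above is derived from the raw transaction JSON below: type 13 is a set-script transaction, chainId 84 (ASCII 'T') marks the Waves testnet, the fee of 1000000 is expressed in wavelets (1 WAVES = 10^8 wavelets, hence the -0.01000000 Waves charged to the sender), and the timestamp is in milliseconds. A minimal sketch of those conversions, using only the Python standard library and values copied from the JSON; note that the 2024.04.28 12:50 [3082549] line is the block time as rendered by the explorer, so it need not match the transaction timestamp to the minute:

from datetime import datetime, timezone

# Values copied from the transaction JSON below.
FEE_WAVELETS = 1_000_000          # "fee": 1000000, "feeAssetId": null -> paid in WAVES
TIMESTAMP_MS = 1_714_297_898_147  # "timestamp": 1714297898147
CHAIN_ID = 84                     # "chainId": 84

# 1 WAVES = 10^8 wavelets, so 1000000 wavelets = 0.01 WAVES (the -0.01000000 shown above).
print(f"fee: {FEE_WAVELETS / 10**8:.8f} WAVES")

# Timestamps are in milliseconds; the explorer header shows the block time,
# which may differ from the transaction timestamp by a minute or so.
print("timestamp:", datetime.fromtimestamp(TIMESTAMP_MS / 1000, tz=timezone.utc).isoformat())

# Chain id 84 is ASCII 'T', i.e. the Waves testnet.
print("chain:", chr(CHAIN_ID))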

{ "type": 13, "id": "EagziTUuatBGN4yrywpoprrknF46urgJ1q46iavxr1Ng", "fee": 1000000, "feeAssetId": null, "timestamp": 1714297898147, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "4ngVfQtVHW4o297Y1CdpAiN4Qn2mKg7VyeMFMUbGn3vTyvETqx8twFZh162EB6Kiv2KK6QD6hHt1vEzQLipYJ1bP" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAgAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKpwFAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwVCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcUJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8GBQAAAANuaWwBAAAACmV4cF9hcHByb3gAAAABAAAAAXgEAAAABXNjYWxlAAAAAAAAAYagAwkAAGYAAAACCQAAaAAAAAIA//////////oFAAAABXNjYWxlBQAAAAF4AAAAAAAAAAABAwkAAGYAAAACBQAAAAF4CQAAaAAAAAIAAAAAAAAAAAYFAAAABXNjYWxlAAAAAAAAAYafBAAAAAxjb2VmZmljaWVudHMJAARMAAAAAgkABRQAAAACAAAAAAAAAOpgAAAAAAAAAYafCQAETAAAAAIJAAUUAAAAAgAAAAAAAADDUAAAAAAAAAFzGAkABEwAAAACCQAFFAAAAAIAAAAAAAAAnEAAAAAAAAABX5AJAARMAAAAAgkABRQAAAACAAAAAAAAAHUwAAAAAAAAAUwICQAETAAAAAIJAAUUAAAAAgAAAAAAAABOIAAAAAAAAAE4gAkABEwAAAACCQAFFAAAAAIAAAAAAAAAJxAAAAAAAAABJPgJAARMAAAAAgkABRQAAAACAAAAAAAAAAAAAAAAAAAAARFwCQAETAAAAAIJAAUUAAAAAgD////////Y8AAAAAAAAAD96AkABEwAAAACCQAFFAAAAAIA////////seAAAAAAAAAA6mAJAARMAAAAAgkABRQAAAACAP///////4rQAAAAAAAAANbYCQAETAAAAAIJAAUUAAAAAgD///////9jwAAAAAAAAADDUAkABEwAAAACCQAFFAAAAAIA////////PLAAAAAAAAAAr8gJAARMAAAAAgkABRQAAAACAP///////xWgAAAAAAAAAJxABQAAAANuaWwEAAAABWluZGV4CQAAaQAAAAIJAABkAAAAAgUAAAABeAAAAAAAAADqYAAAAAAAAAAnEAQAAAAJJHQwOTIwOTU3CQABkQAAAAIFAAAADGNvZWZmaWNpZW50cwUAAAAFaW5kZXgEAAAABmJsYWJsYQgFAAAACSR0MDkyMDk1NwAAAAJfMQQAAAABeQgFAAAACSR0MDkyMDk1NwAAAAJfMgUAAAABeQEAAAAHc2lnbW9pZAAAAAIAAAABegAAAAtkZWJ1Z1ByZWZpeAQAAAAEYmFzZQAAAAAAAAGGoAQAAAAJcG9zaXRpdmVaAwkAAGYAAAACAAAAAAAAAAAABQAAAAF6CQEAAAABLQAAAAEFAAAAAXoFAAAAAXoEAAAACGV4cFZhbHVlCQEAAAAKZXhwX2FwcHJveAAAAAEFAAAACXBvc2l0aXZlWgQAAAAIc2lnVmFsdWUJAABpAAAAAgkAAGgAAAACBQAAAARiYXNlBQAAAAhleHBWYWx1ZQkAAGQAAAACBQAAAARiYXNlBQAAAAhleHBWYWx1ZQkABRQAAAACCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAlwb3NpdGl2ZVoFAAAACXBvc2l0aXZlWgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAIZXhwVmFsdWUFAAAACGV4cFZhbHVlCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEJAABoAAAAAgkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAAAAAAAAABhqAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABCQAAaAAAAAIJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAABAAAAAAAAAYagBAAAAAskdDAxNzg5MTg0MgkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjFOMAQAAAANZGVidWdFbnRyaWVzMAgFAAAACyR0MDE3ODkxODQyAAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTc4OTE4NDIAAAACXzIEAAAACyR0MDE4NDcxOTAwCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTECAAAACExheWVyMU4xBAAAAA1kZWJ1Z0VudHJpZXMxCAUAAAALJHQwMTg0Nz
E5MDAAAAACXzEEAAAABHNpZzEIBQAAAAskdDAxODQ3MTkwMAAAAAJfMgQAAAAJZGVidWdJbmZvCQAETgAAAAIFAAAADWRlYnVnRW50cmllczAFAAAADWRlYnVnRW50cmllczEEAAAABm91dHB1dAkABEwAAAACBQAAAARzaWcwCQAETAAAAAIFAAAABHNpZzEFAAAAA25pbAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQkAAGgAAAACCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAAAAAAAAAGGoAQAAAALJHQwMjIxMDIyNjMJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIyTjAEAAAADWRlYnVnRW50cmllczAIBQAAAAskdDAyMjEwMjI2MwAAAAJfMQQAAAAEc2lnMAgFAAAACyR0MDIyMTAyMjYzAAAAAl8yBAAAAAlkZWJ1Z0luZm8FAAAADWRlYnVnRW50cmllczAEAAAABm91dHB1dAUAAAAEc2lnMAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAZpbnB1dHMJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQxCQAETAAAAAIFAAAADHNjYWxlZElucHV0MgUAAAADbmlsBAAAAAskdDAyNTc1MjY3MwkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAABkxheWVyMQQAAAALZGVidWdMYXllcjEIBQAAAAskdDAyNTc1MjY3MwAAAAJfMQQAAAAMbGF5ZXIxT3V0cHV0CAUAAAALJHQwMjU3NTI2NzMAAAACXzIEAAAACyR0MDI2NzgyNzgyCQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwIAAAAGTGF5ZXIyBAAAAAtkZWJ1Z0xheWVyMggFAAAACyR0MDI2NzgyNzgyAAAAAl8xBAAAAAxsYXllcjJPdXRwdXQIBQAAAAskdDAyNjc4Mjc4MgAAAAJfMgkABE4AAAACCQAETgAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAUAAAAMbGF5ZXIyT3V0cHV0BQAAAANuaWwFAAAAC2RlYnVnTGF5ZXIxBQAAAAtkZWJ1Z0xheWVyMgAAAADY8rlu", "height": 3082549, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: 6V35pBudomPg2sNkERM1SV5yreXyCvfUjZxcnWEAwv9b Next: 2b25PWcsfJJqtzYdx5PHEx5557PpxguKVWbbsMW8ffgA Diff:
Full:
Old → new decompiled script; lines starting with - are from the previous version, lines starting with + are set by this transaction:

  {-# STDLIB_VERSION 5 #-}
  {-# SCRIPT_TYPE ACCOUNT #-}
  {-# CONTENT_TYPE DAPP #-}
- let layer1Weights = [[600497, 600733], [414197, 414252]]
+ let layer1Weights = [[600497, 600732], [414197, 414253]]

- let layer1Biases = [-259050, -635638]
+ let layer1Biases = [-259051, -635637]

- let layer2Weights = [[832966, -897141]]
+ let layer2Weights = [[832965, -897142]]

- let layer2Biases = [-381179]
+ let layer2Biases = [-381178]

  func exp_approx (x) = {
      let scale = 100000
      if (((-6 * scale) > x))
          then 1
          else if ((x > (6 * scale)))
-             then scale
+             then 99999
              else {
-                 let coefficients = [$Tuple2(60000, (scale - 1)), $Tuple2(50000, (scale - 2)), $Tuple2(40000, (scale - 3)), $Tuple2(30000, (scale - 10)), $Tuple2(20000, (scale - 20)), $Tuple2(10000, (scale - 30)), $Tuple2(0, scale), $Tuple2(-10000, (scale + 30)), $Tuple2(-20000, (scale + 20)), $Tuple2(-30000, (scale + 10)), $Tuple2(-40000, (scale + 3)), $Tuple2(-50000, (scale + 2)), $Tuple2(-60000, (scale + 1))]
+                 let coefficients = [$Tuple2(60000, 99999), $Tuple2(50000, 95000), $Tuple2(40000, 90000), $Tuple2(30000, 85000), $Tuple2(20000, 80000), $Tuple2(10000, 75000), $Tuple2(0, 70000), $Tuple2(-10000, 65000), $Tuple2(-20000, 60000), $Tuple2(-30000, 55000), $Tuple2(-40000, 50000), $Tuple2(-50000, 45000), $Tuple2(-60000, 40000)]
                  let index = ((x + 60000) / 10000)
-                 let $t0926968 = coefficients[index]
-                 let coefficient = $t0926968._1
-                 let y = $t0926968._2
+                 let $t0920957 = coefficients[index]
+                 let blabla = $t0920957._1
+                 let y = $t0920957._2
                  y
              }
      }


  func sigmoid (z,debugPrefix) = {
      let base = 100000
      let positiveZ = if ((0 > z))
          then -(z)
          else z
      let expValue = exp_approx(positiveZ)
-     let sigValue = (base - ((base * base) / (base + expValue)))
+     let sigValue = ((base * expValue) / (base + expValue))
      $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
      }


  func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
      let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
      let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-     let $t018331886 = sigmoid(sum0, "Layer1N0")
-     let debugEntries0 = $t018331886._1
-     let sig0 = $t018331886._2
-     let $t018911944 = sigmoid(sum1, "Layer1N1")
-     let debugEntries1 = $t018911944._1
-     let sig1 = $t018911944._2
+     let $t017891842 = sigmoid(sum0, "Layer1N0")
+     let debugEntries0 = $t017891842._1
+     let sig0 = $t017891842._2
+     let $t018471900 = sigmoid(sum1, "Layer1N1")
+     let debugEntries1 = $t018471900._1
+     let sig1 = $t018471900._2
      let debugInfo = (debugEntries0 ++ debugEntries1)
      let output = [sig0, sig1]
      $Tuple2(debugInfo, output)
      }


  func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
      let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-     let $t022542307 = sigmoid(sum0, "Layer2N0")
-     let debugEntries0 = $t022542307._1
-     let sig0 = $t022542307._2
+     let $t022102263 = sigmoid(sum0, "Layer2N0")
+     let debugEntries0 = $t022102263._1
+     let sig0 = $t022102263._2
      let debugInfo = debugEntries0
      let output = sig0
      $Tuple2(debugInfo, output)
      }


  @Callable(i)
  func predict (input1,input2) = {
      let scaledInput1 = if ((input1 == 1))
          then 1000000
          else 0
      let scaledInput2 = if ((input2 == 1))
          then 1000000
          else 0
      let inputs = [scaledInput1, scaledInput2]
-     let $t026192717 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-     let debugLayer1 = $t026192717._1
-     let layer1Output = $t026192717._2
-     let $t027222826 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-     let debugLayer2 = $t027222826._1
-     let layer2Output = $t027222826._2
+     let $t025752673 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+     let debugLayer1 = $t025752673._1
+     let layer1Output = $t025752673._2
+     let $t026782782 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+     let debugLayer2 = $t026782782._1
+     let layer2Output = $t026782782._2
      (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
      }
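Two things stand out in the diff above. First, every weight and bias moves by exactly one unit in its last digit, and exp_approx now saturates to 99999 and reads explicit table values instead of expressions in scale (the unused first tuple element is also renamed from coefficient to blabla). Second, the sigmoid rewrite is an algebraic no-op in exact arithmetic, since base - base*base/(base + e) equals base*e/(base + e); the two forms differ only in where integer truncation lands. A small off-chain check of that at the script's fixed-point base of 100000 (Python's // matches Ride's integer division for the non-negative operands that occur here):

# Hedged sketch: old vs. new fixed-point sigmoid formulas from the diff above.
base = 100000  # fixed-point scale ('base' in the script)

def sig_old(exp_value):
    # previous version: base - ((base * base) / (base + expValue))
    return base - (base * base) // (base + exp_value)

def sig_new(exp_value):
    # current version: (base * expValue) / (base + expValue)
    return (base * exp_value) // (base + exp_value)

for e in (1, 40000, 70000, 99999):  # sample outputs of the new exp_approx table
    print(e, sig_old(e), sig_new(e))

For these sample values the new formula comes out one fixed-point unit lower than the old one, which is the only behavioral difference the rewrite introduces.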

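For anyone who wants to reproduce the result and debug entries off-chain, here is a sketch that transliterates the current (+) version of the decompiled dApp into plain Python. It keeps the script's fixed-point conventions (0/1 inputs rescaled to 1000000; weights, biases and sigmoid on a 100000 scale) and its identifiers, and it makes no claim about what the stored weights were trained to do; the Ride-vs-Python division caveat only concerns non-negative operands here, where the two agree.

# Hedged, off-chain transliteration of the current (+) version of the decompiled script.
# All arithmetic is on integers; '//' stands in for Ride's integer division, which agrees
# with it for the non-negative dividends and divisors that occur in these code paths.

layer1Weights = [[600497, 600732], [414197, 414253]]
layer1Biases = [-259051, -635637]
layer2Weights = [[832965, -897142]]
layer2Biases = [-381178]

SCALE = 100000  # 'scale' in exp_approx and 'base' in sigmoid

def exp_approx(x):
    # Piecewise-constant lookup: 1 below -6*SCALE, 99999 above 6*SCALE, table in between.
    # x in [70000, 6*SCALE] would index past the 13-entry table (an error on-chain as well).
    if (-6 * SCALE) > x:
        return 1
    if x > 6 * SCALE:
        return 99999
    coefficients = [(60000, 99999), (50000, 95000), (40000, 90000), (30000, 85000),
                    (20000, 80000), (10000, 75000), (0, 70000), (-10000, 65000),
                    (-20000, 60000), (-30000, 55000), (-40000, 50000), (-50000, 45000),
                    (-60000, 40000)]
    index = (x + 60000) // 10000
    return coefficients[index][1]  # only the second element ('y') is used; '_1' is the unused 'blabla'

def sigmoid(z, debugPrefix):
    positiveZ = -z if 0 > z else z          # the sign of z is dropped, as in the script
    expValue = exp_approx(positiveZ)
    sigValue = (SCALE * expValue) // (SCALE + expValue)
    debug = {debugPrefix + "positiveZ": positiveZ,
             debugPrefix + "expValue": expValue,
             debugPrefix + "sigValue": sigValue}
    return debug, sigValue

def forwardPassLayer1(inp, weights, biases, debugPrefix):
    # debugPrefix mirrors the Ride signature; the script passes literal prefixes to sigmoid.
    sum0 = inp[0] * weights[0][0] + inp[1] * weights[0][1] + biases[0] * 100000
    sum1 = inp[0] * weights[1][0] + inp[1] * weights[1][1] + biases[1] * 100000
    d0, sig0 = sigmoid(sum0, "Layer1N0")
    d1, sig1 = sigmoid(sum1, "Layer1N1")
    return {**d0, **d1}, [sig0, sig1]

def forwardPassLayer2(inp, weights, biases, debugPrefix):
    sum0 = inp[0] * weights[0][0] + inp[1] * weights[0][1] + biases[0] * 100000
    d0, sig0 = sigmoid(sum0, "Layer2N0")
    return d0, sig0

def predict(input1, input2):
    # Mirrors the @Callable entry point: 0/1 inputs are rescaled to 0 / 1000000.
    inputs = [1000000 if input1 == 1 else 0, 1000000 if input2 == 1 else 0]
    debugLayer1, layer1Output = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
    debugLayer2, layer2Output = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
    return {"result": layer2Output, **debugLayer1, **debugLayer2}

if __name__ == "__main__":
    # Evaluate the four 0/1 input combinations and show the would-be 'result' entry.
    for a in (0, 1):
        for b in (0, 1):
            print((a, b), predict(a, b)["result"])

The __main__ block simply evaluates the four 0/1 input pairs and prints the integer this transliteration computes for the result entry; the remaining dictionary keys correspond to the Layer1N0/Layer1N1/Layer2N0 debug entries the contract writes.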