tx · 2b25PWcsfJJqtzYdx5PHEx5557PpxguKVWbbsMW8ffgA

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 12:59 [3082559] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "2b25PWcsfJJqtzYdx5PHEx5557PpxguKVWbbsMW8ffgA", "fee": 1000000, "feeAssetId": null, "timestamp": 1714298417301, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "ytP45QFEDTrqFyfLANC1rSgWhF5bLrBzSacJxfDsz42hh5A7wbPWW4jgcF8QDdm54LH2BUTH2QUcMHnVKB6kc4m" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAcAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmwCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfQJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwVCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcUJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdzY2FsZWRaCQAAaQAAAAIFAAAACXBvc2l0aXZlWgAAAAAAAAAnEAQAAAAHZXhwUGFydAkAAGsAAAADBQAAAAFlBQAAAARiYXNlBQAAAAdzY2FsZWRaBAAAAAhzaWdWYWx1ZQkAAGsAAAADBQAAAARiYXNlCQAAZAAAAAIFAAAABGJhc2UFAAAAB2V4cFBhcnQFAAAABGJhc2UJAAUUAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAJcG9zaXRpdmVaBQAAAAlwb3NpdGl2ZVoJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAAB2V4cFBhcnQFAAAAB2V4cFBhcnQJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACHNpZ1ZhbHVlBQAAAAhzaWdWYWx1ZQUAAAADbmlsBQAAAAhzaWdWYWx1ZQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQkAAGgAAAACCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAAAAAAAAAGGoAQAAAAEc3VtMQkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAEAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAEAAAAAAAAAAAEJAABoAAAAAgkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAEAAAAAAAABhqAEAAAACyR0MDExMzkxMTkyCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTACAAAACExheWVyMU4wBAAAAA1kZWJ1Z0VudHJpZXMwCAUAAAALJHQwMTEzOTExOTIAAAACXzEEAAAABHNpZzAIBQAAAAskdDAxMTM5MTE5MgAAAAJfMgQAAAALJHQwMTE5NzEyNTAJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMQIAAAAITGF5ZXIxTjEEAAAADWRlYnVnRW50cmllczEIBQAAAAskdDAxMTk3MTI1MAAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDExOTcxMjUwAAAAAl8yBAAAAAlkZWJ1Z0luZm8JAAROAAAAAgUAAAANZGVidWdFbnRyaWVzMAUAAAANZGVidWdFbnRyaWVzMQQAAAAGb3V0cHV0CQAETAAAAAIFAAAABHNpZzAJAARMAAAAAgUAAAAEc2lnMQUAAAADbmlsCQAFFAAAAAIFAAAACWRlYnVnSW5mbwUAAAAGb3V0cHV0AQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAQAAAAFaW5wdXQAAAAHd2VpZ2h0cwAAAAZiaWFzZXMAAAALZGVidWdQcmVmaXgEAAAABHN1bTAJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAABCQAAaAAAAAIJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAAAAAAAAAAAYagBAAAAAskdDAxNTYwMTYxMwkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjJOMAQAAAANZGVidWdFbnRyaWVzMAgFAAAACyR0MDE1NjAxNjEzAAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTU2MDE2MTMAAAACXzIEAAAACWRlYnVnSW5mbwUAAAANZGVidWdFbnRyaWVzMAQAAAAGb3V0cHV0BQAAAARzaWcwCQAFFAAAAAIFAAAACWRlYnVnSW5mbwUAAAAGb3V0cHV0AAAAAQAAAAFpAQAAAAdwcmVkaWN0AAAAAgAAAAZpbnB
1dDEAAAAGaW5wdXQyBAAAAAxzY2FsZWRJbnB1dDEDCQAAAAAAAAIFAAAABmlucHV0MQAAAAAAAAAAAQAAAAAAAA9CQAAAAAAAAAAAAAQAAAAMc2NhbGVkSW5wdXQyAwkAAAAAAAACBQAAAAZpbnB1dDIAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAABmlucHV0cwkABEwAAAACBQAAAAxzY2FsZWRJbnB1dDEJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQyBQAAAANuaWwEAAAACyR0MDE5MjUyMDIzCQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAAEBQAAAAZpbnB1dHMFAAAADWxheWVyMVdlaWdodHMFAAAADGxheWVyMUJpYXNlcwIAAAAGTGF5ZXIxBAAAAAtkZWJ1Z0xheWVyMQgFAAAACyR0MDE5MjUyMDIzAAAAAl8xBAAAAAxsYXllcjFPdXRwdXQIBQAAAAskdDAxOTI1MjAyMwAAAAJfMgQAAAALJHQwMjAyODIxMzIJAQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAQFAAAADGxheWVyMU91dHB1dAUAAAANbGF5ZXIyV2VpZ2h0cwUAAAAMbGF5ZXIyQmlhc2VzAgAAAAZMYXllcjIEAAAAC2RlYnVnTGF5ZXIyCAUAAAALJHQwMjAyODIxMzIAAAACXzEEAAAADGxheWVyMk91dHB1dAgFAAAACyR0MDIwMjgyMTMyAAAAAl8yCQAETgAAAAIJAAROAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgIAAAAGcmVzdWx0BQAAAAxsYXllcjJPdXRwdXQFAAAAA25pbAUAAAALZGVidWdMYXllcjEFAAAAC2RlYnVnTGF5ZXIyAAAAAN7D/2o=", "height": 3082559, "applicationStatus": "succeeded", "spentComplexity": 0 }

Prev: EagziTUuatBGN4yrywpoprrknF46urgJ1q46iavxr1Ng
Next: 7s1h3jYoYnAwi8pPGmy5HUUL4Ybuoz6K6kVB7GwRyRrV
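This record is a SetScript transaction (type 13) on the Waves testnet (chainId 84): it replaces the account script of 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY and pays the 1000000-wavelet fee shown in the header as -0.01000000 Waves. The script field above is the compiled dApp in base64; the decompiled diff against the previously installed script follows.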
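The update is small: the lookup-table exp_approx helper from the previous script is dropped, and sigmoid now builds its approximation inline from e scaled to 2718281 and a fixed-point base of 1000000 (expPart = fraction(e, base, positiveZ / 10000)). Three of the four layer-1 weights shift by one unit, layer2Biases moves from -381178 to -381179, and the rest of the network (layer1Biases, layer2Weights, both forward-pass functions and the predict entry point) is unchanged apart from the recomputed $t0... position markers.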
Diff of the account script ('-' lines are from the previous version, '+' lines are from the version installed by this transaction):
 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600732], [414197, 414253]]
+let layer1Weights = [[600496, 600733], [414196, 414253]]

 let layer1Biases = [-259051, -635637]

 let layer2Weights = [[832965, -897142]]

-let layer2Biases = [-381178]
-
-func exp_approx (x) = {
-    let scale = 100000
-    if (((-6 * scale) > x))
-        then 1
-        else if ((x > (6 * scale)))
-            then 99999
-            else {
-                let coefficients = [$Tuple2(60000, 99999), $Tuple2(50000, 95000), $Tuple2(40000, 90000), $Tuple2(30000, 85000), $Tuple2(20000, 80000), $Tuple2(10000, 75000), $Tuple2(0, 70000), $Tuple2(-10000, 65000), $Tuple2(-20000, 60000), $Tuple2(-30000, 55000), $Tuple2(-40000, 50000), $Tuple2(-50000, 45000), $Tuple2(-60000, 40000)]
-                let index = ((x + 60000) / 10000)
-                let $t0920957 = coefficients[index]
-                let blabla = $t0920957._1
-                let y = $t0920957._2
-                y
-                }
-    }
-
+let layer2Biases = [-381179]

 func sigmoid (z,debugPrefix) = {
-    let base = 100000
+    let e = 2718281
+    let base = 1000000
     let positiveZ = if ((0 > z))
         then -(z)
         else z
-    let expValue = exp_approx(positiveZ)
-    let sigValue = ((base * expValue) / (base + expValue))
-    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
+    let scaledZ = (positiveZ / 10000)
+    let expPart = fraction(e, base, scaledZ)
+    let sigValue = fraction(base, (base + expPart), base)
+    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
     }


 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
     let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t017891842 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t017891842._1
-    let sig0 = $t017891842._2
-    let $t018471900 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t018471900._1
-    let sig1 = $t018471900._2
+    let $t011391192 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t011391192._1
+    let sig0 = $t011391192._2
+    let $t011971250 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t011971250._1
+    let sig1 = $t011971250._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
     }


 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t022102263 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t022102263._1
-    let sig0 = $t022102263._2
+    let $t015601613 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t015601613._1
+    let sig0 = $t015601613._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
     }


 @Callable(i)
 func predict (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t025752673 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t025752673._1
-    let layer1Output = $t025752673._2
-    let $t026782782 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t026782782._1
-    let layer2Output = $t026782782._2
+    let $t019252023 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t019252023._1
+    let layer1Output = $t019252023._2
+    let $t020282132 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t020282132._1
+    let layer2Output = $t020282132._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
     }
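For readers who want to trace the numbers, here is a minimal off-chain sketch of the new script's arithmetic in Python (illustrative only; the names fraction, forward_layer and predict are transliterations, not part of the deployed code). It assumes Ride's fraction(a, b, c) evaluates a * b / c with integer floor division. Note that, as written, fraction(base, (base + expPart), base) reduces to base + expPart, and the sketch reproduces that behaviour rather than a textbook sigmoid.

# Off-chain sketch of the new predict() arithmetic (illustrative, not the deployed code).
# Assumes Ride's fraction(a, b, c) behaves like floor(a * b / c) for these values.

LAYER1_WEIGHTS = [[600496, 600733], [414196, 414253]]
LAYER1_BIASES = [-259051, -635637]
LAYER2_WEIGHTS = [[832965, -897142]]
LAYER2_BIASES = [-381179]

E_FIXED = 2718281  # e scaled by 10^6, as in the script
BASE = 1000000     # fixed-point base

def fraction(a, b, c):
    # Ride-style multiply-then-divide on integers
    return (a * b) // c

def sigmoid(z):
    positive_z = -z if z < 0 else z
    scaled_z = positive_z // 10000   # scaled_z == 0 (|z| < 10000) would mean division by zero below
    exp_part = fraction(E_FIXED, BASE, scaled_z)
    # fraction(BASE, BASE + exp_part, BASE) reduces to BASE + exp_part
    return fraction(BASE, BASE + exp_part, BASE)

def forward_layer(inputs, weights, biases):
    # weighted sum per neuron, bias scaled by 100000 as in the script, then "sigmoid"
    return [sigmoid(sum(i * w for i, w in zip(inputs, row)) + b * 100000)
            for row, b in zip(weights, biases)]

def predict(input1, input2):
    inputs = [BASE if input1 == 1 else 0, BASE if input2 == 1 else 0]
    hidden = forward_layer(inputs, LAYER1_WEIGHTS, LAYER1_BIASES)
    return forward_layer(hidden, LAYER2_WEIGHTS, LAYER2_BIASES)[0]

for a in (0, 1):
    for b in (0, 1):
        print(a, b, predict(a, b))

The printed value for each input pair corresponds to the IntegerEntry("result", layer2Output) that predict writes on-chain, alongside the Layer1N0/Layer1N1/Layer2N0 debug entries.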
