tx · ES7iTxSeRwykCCYgMLEpVdKbM3mXJqP9tGkfdPysM26x

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 15:11 [3082693] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "ES7iTxSeRwykCCYgMLEpVdKbM3mXJqP9tGkfdPysM26x", "fee": 1000000, "feeAssetId": null, "timestamp": 1714306298056, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "2rig8TW743hiayJ4vYmecXPbLSRTrCXVBbjR68RR8fNt3HcFKbijTMYqH9XTRBFhs566zUN7dd2WoFxHM3M77tJp" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAgAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwVCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcYJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAACWV4cEFwcHJveAAAAAEAAAABeAQAAAAHc2NhbGVkWAkAAGsAAAADBQAAAAF4AAAAAAAAAAABAAAAAAAAAAAKBAAAAAhzY2FsZWRYMgkAAGsAAAADBQAAAAdzY2FsZWRYBQAAAAdzY2FsZWRYAAAAAAAAAAAKBAAAAAV0ZXJtMQkAAGUAAAACAAAAAAAAAAAKBQAAAAdzY2FsZWRYBAAAAAV0ZXJtMgkAAGsAAAADBQAAAAhzY2FsZWRYMgAAAAAAAAAABQAAAAAAAAAAAQkAAGQAAAACBQAAAAV0ZXJtMQUAAAAFdGVybTIBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAB2V4cE5lZ1oJAQAAAAlleHBBcHByb3gAAAABCQEAAAABLQAAAAEFAAAAAXoEAAAADm9uZVBsdXNFeHBOZWdaCQAAZAAAAAIAAAAAAAAAAAoFAAAAB2V4cE5lZ1oEAAAACHNpZ1ZhbHVlCQAAawAAAAMAAAAAAAAAAAoFAAAADm9uZVBsdXNFeHBOZWdaAAAAAAAAAAABCQAFFAAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAABmlucHV0WgUAAAABegkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAHZXhwTmVnWgUAAAAHZXhwTmVnWgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAOb25lUGx1c0V4cE5lZ1oFAAAADm9uZVBsdXNFeHBOZWdaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAABAAAAARzdW0xCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAQAAAAAAAAAAAAkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAQAAAAAAAAAAAQkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAEEAAAACyR0MDEyOTAxMzQzCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTACAAAACExheWVyMU4wBAAAAA1kZWJ1Z0VudHJpZXMwCAUAAAALJHQwMTI5MDEzNDMAAAACXzEEAAAABHNpZzAIBQAAAAskdDAxMjkwMTM0MwAAAAJfMgQAAAALJHQwMTM0ODE0MDEJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMQIAAAAITGF5ZXIxTjEEAAAADWRlYnVnRW50cmllczEIBQAAAAskdDAxMzQ4MTQwMQAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDEzNDgxNDAxAAAAAl8yBAAAAAlkZWJ1Z0luZm8JAAROAAAAAgUAAAANZGVidWdFbnRyaWVzMAUAAAANZGVidWdFbnRyaWVzMQQAAAAGb3V0cHV0CQAETAAAAAIFAAAABHNpZzAJAARMAAAAAgUAAAAEc2lnMQUAAAADbmlsCQAFFAAAAAIFAAAACWRlYnVnSW5mbwUAAAAGb3V0cHV0AQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAQAAAAFaW5wdXQAAAAHd2VpZ2h0cwAAAAZiaWFzZXMAAAALZGVidWdQcmVmaXgEAAAABHN1bTAJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAABCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAQAAAALJHQwMTcwMjE3NTUJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIyTjAEAAAADWRlYnVnRW50cmllczAIBQ
AAAAskdDAxNzAyMTc1NQAAAAJfMQQAAAAEc2lnMAgFAAAACyR0MDE3MDIxNzU1AAAAAl8yBAAAAAlkZWJ1Z0luZm8FAAAADWRlYnVnRW50cmllczAEAAAABm91dHB1dAUAAAAEc2lnMAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAAAAEAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAAAABAAAAAAAAAAAABAAAAAZpbnB1dHMJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQxCQAETAAAAAIFAAAADHNjYWxlZElucHV0MgUAAAADbmlsBAAAAAskdDAyMDU1MjE1MwkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAABkxheWVyMQQAAAALZGVidWdMYXllcjEIBQAAAAskdDAyMDU1MjE1MwAAAAJfMQQAAAAMbGF5ZXIxT3V0cHV0CAUAAAALJHQwMjA1NTIxNTMAAAACXzIEAAAACyR0MDIxNTgyMjYyCQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwIAAAAGTGF5ZXIyBAAAAAtkZWJ1Z0xheWVyMggFAAAACyR0MDIxNTgyMjYyAAAAAl8xBAAAAAxsYXllcjJPdXRwdXQIBQAAAAskdDAyMTU4MjI2MgAAAAJfMgkABE4AAAACCQAETgAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAUAAAAMbGF5ZXIyT3V0cHV0BQAAAANuaWwFAAAAC2RlYnVnTGF5ZXIxBQAAAAtkZWJ1Z0xheWVyMgAAAAB+pDno", "height": 3082693, "applicationStatus": "succeeded", "spentComplexity": 0 }
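
The deployed script's @Callable function, predict(input1, input2), can be exercised without broadcasting an InvokeScript transaction by asking a node to evaluate it. The sketch below is a minimal example of that and is not part of the explorer output; it assumes a public testnet node at nodes-testnet.wavesnodes.com (the transaction's chainId 84 is testnet) and the POST /utils/script/evaluate/{address} endpoint available on recent node versions.

# Minimal sketch: dry-run predict() against the deployed dApp via a node's
# script-evaluation endpoint. Endpoint availability, response shape and the
# node URL below are assumptions, not taken from the page above.
import json
import urllib.request

DAPP = "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY"   # sender / dApp address from the tx above
NODE = "https://nodes-testnet.wavesnodes.com"  # assumed public testnet node

def evaluate(expr: str) -> dict:
    req = urllib.request.Request(
        f"{NODE}/utils/script/evaluate/{DAPP}",
        data=json.dumps({"expr": expr}).encode(),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.load(resp)

for a, b in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    # The response describes the evaluation result, i.e. the "result" entry
    # plus the Layer*/sigmoid debug entries the script would write.
    print(a, b, evaluate(f"predict({a}, {b})"))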
Diff:

  {-# STDLIB_VERSION 5 #-}
  {-# SCRIPT_TYPE ACCOUNT #-}
  {-# CONTENT_TYPE DAPP #-}
- let layer1Weights = [[600497, 600732], [414197, 414253]]
+ let layer1Weights = [[600497, 600733], [414197, 414253]]

- let layer1Biases = [-259050, -635637]
+ let layer1Biases = [-259051, -635637]

  let layer2Weights = [[832966, -897142]]

  let layer2Biases = [-381179]

  func expApprox (x) = {
-     let scaledX = fraction(x, 1, 1000)
-     let scaledX2 = fraction(scaledX, scaledX, 1000)
-     let term1 = (1000 - scaledX)
-     let term2 = fraction(scaledX2, 500, 1)
+     let scaledX = fraction(x, 1, 10)
+     let scaledX2 = fraction(scaledX, scaledX, 10)
+     let term1 = (10 - scaledX)
+     let term2 = fraction(scaledX2, 5, 1)
      (term1 + term2)
      }

  func sigmoid (z,debugPrefix) = {
      let expNegZ = expApprox(-(z))
-     let onePlusExpNegZ = (1000 + expNegZ)
-     let sigValue = fraction(1000, onePlusExpNegZ, 1)
+     let onePlusExpNegZ = (10 + expNegZ)
+     let sigValue = fraction(10, onePlusExpNegZ, 1)
      $Tuple2([IntegerEntry((debugPrefix + "inputZ"), z), IntegerEntry((debugPrefix + "expNegZ"), expNegZ), IntegerEntry((debugPrefix + "onePlusExpNegZ"), onePlusExpNegZ), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
      }
  ...
  func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
      let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + biases[0])
      let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + biases[1])
-     let $t012951348 = sigmoid(sum0, "Layer1N0")
-     let debugEntries0 = $t012951348._1
-     let sig0 = $t012951348._2
-     let $t013531406 = sigmoid(sum1, "Layer1N1")
-     let debugEntries1 = $t013531406._1
-     let sig1 = $t013531406._2
+     let $t012901343 = sigmoid(sum0, "Layer1N0")
+     let debugEntries0 = $t012901343._1
+     let sig0 = $t012901343._2
+     let $t013481401 = sigmoid(sum1, "Layer1N1")
+     let debugEntries1 = $t013481401._1
+     let sig1 = $t013481401._2
      let debugInfo = (debugEntries0 ++ debugEntries1)
      let output = [sig0, sig1]
      $Tuple2(debugInfo, output)
  ...
  func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
      let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + biases[0])
-     let $t017071760 = sigmoid(sum0, "Layer2N0")
-     let debugEntries0 = $t017071760._1
-     let sig0 = $t017071760._2
+     let $t017021755 = sigmoid(sum0, "Layer2N0")
+     let debugEntries0 = $t017021755._1
+     let sig0 = $t017021755._2
      let debugInfo = debugEntries0
      let output = sig0
      $Tuple2(debugInfo, output)
  ...
          then 1
          else 0
      let inputs = [scaledInput1, scaledInput2]
-     let $t020602158 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-     let debugLayer1 = $t020602158._1
-     let layer1Output = $t020602158._2
-     let $t021632267 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-     let debugLayer2 = $t021632267._1
-     let layer2Output = $t021632267._2
+     let $t020552153 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+     let debugLayer1 = $t020552153._1
+     let layer1Output = $t020552153._2
+     let $t021582262 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+     let debugLayer2 = $t021582262._1
+     let layer2Output = $t021582262._2
      (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
      }
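
Read as fixed-point arithmetic, this change rescales the approximation rather than altering its shape: with the old constants expApprox(0) = 1000 - 0 + 0 = 1000, i.e. e^0 represented as 1.000 at a scale of 1000, while with the new constants expApprox(0) = 10 - 0 + 0 = 10, the same value at a scale of 10. The sigmoid's 1000s are reduced to 10s to match, and the layer-1 weight and bias each move by a single unit.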
Full (current script):

{-# STDLIB_VERSION 5 #-}
{-# SCRIPT_TYPE ACCOUNT #-}
{-# CONTENT_TYPE DAPP #-}
let layer1Weights = [[600497, 600733], [414197, 414253]]

let layer1Biases = [-259051, -635637]

let layer2Weights = [[832966, -897142]]

let layer2Biases = [-381179]

func expApprox (x) = {
    let scaledX = fraction(x, 1, 10)
    let scaledX2 = fraction(scaledX, scaledX, 10)
    let term1 = (10 - scaledX)
    let term2 = fraction(scaledX2, 5, 1)
    (term1 + term2)
    }


func sigmoid (z,debugPrefix) = {
    let expNegZ = expApprox(-(z))
    let onePlusExpNegZ = (10 + expNegZ)
    let sigValue = fraction(10, onePlusExpNegZ, 1)
    $Tuple2([IntegerEntry((debugPrefix + "inputZ"), z), IntegerEntry((debugPrefix + "expNegZ"), expNegZ), IntegerEntry((debugPrefix + "onePlusExpNegZ"), onePlusExpNegZ), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
    }


func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + biases[0])
    let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + biases[1])
    let $t012901343 = sigmoid(sum0, "Layer1N0")
    let debugEntries0 = $t012901343._1
    let sig0 = $t012901343._2
    let $t013481401 = sigmoid(sum1, "Layer1N1")
    let debugEntries1 = $t013481401._1
    let sig1 = $t013481401._2
    let debugInfo = (debugEntries0 ++ debugEntries1)
    let output = [sig0, sig1]
    $Tuple2(debugInfo, output)
    }


func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + biases[0])
    let $t017021755 = sigmoid(sum0, "Layer2N0")
    let debugEntries0 = $t017021755._1
    let sig0 = $t017021755._2
    let debugInfo = debugEntries0
    let output = sig0
    $Tuple2(debugInfo, output)
    }


@Callable(i)
func predict (input1,input2) = {
    let scaledInput1 = if ((input1 == 1))
        then 1
        else 0
    let scaledInput2 = if ((input2 == 1))
        then 1
        else 0
    let inputs = [scaledInput1, scaledInput2]
    let $t020552153 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
    let debugLayer1 = $t020552153._1
    let layer1Output = $t020552153._2
    let $t021582262 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
    let debugLayer2 = $t021582262._1
    let layer2Output = $t021582262._2
    (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
    }
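
For reference, the forward pass above is plain integer arithmetic and can be reproduced off-chain. The sketch below is a minimal Python re-implementation of the same 2-2-1 pass with the polynomial exp/sigmoid approximation, handy for previewing the "result" and debug values before calling predict; it assumes Ride's fraction(a, b, c) behaves like floor(a * b / c) and is not part of the deployed script.

# Off-chain sketch of the on-chain math; constants copied from the script above.
# Assumption: fraction(a, b, c) is modeled as floor(a * b / c).
def fraction(a, b, c):
    return (a * b) // c

layer1Weights = [[600497, 600733], [414197, 414253]]
layer1Biases  = [-259051, -635637]
layer2Weights = [[832966, -897142]]
layer2Biases  = [-381179]

def exp_approx(x):
    scaled_x  = fraction(x, 1, 10)
    scaled_x2 = fraction(scaled_x, scaled_x, 10)
    return (10 - scaled_x) + fraction(scaled_x2, 5, 1)

def sigmoid(z):
    # Mirrors the script exactly: fraction(10, x, 1) multiplies by 10,
    # so this returns 10 * (10 + expApprox(-z)) rather than a bounded sigmoid.
    return fraction(10, 10 + exp_approx(-z), 1)

def predict(input1, input2):
    inputs = [1 if input1 == 1 else 0, 1 if input2 == 1 else 0]
    sig0 = sigmoid(inputs[0] * layer1Weights[0][0] + inputs[1] * layer1Weights[0][1] + layer1Biases[0])
    sig1 = sigmoid(inputs[0] * layer1Weights[1][0] + inputs[1] * layer1Weights[1][1] + layer1Biases[1])
    return sigmoid(sig0 * layer2Weights[0][0] + sig1 * layer2Weights[0][1] + layer2Biases[0])

for a in (0, 1):
    for b in (0, 1):
        print(a, b, predict(a, b))  # value the script would write as the "result" entry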
