tx · Yxm27VmnSiXh83CHjo1DgvTCSkak33fbPnBnSF6qgCS

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 13:21 [3082584] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves
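
This is a SetScript transaction (type 13): it installs the dApp script decompiled further down on the sender's testnet account (chainId 84 is 'T', the Waves testnet). For reference, the raw JSON dump below can be re-fetched from any node serving this network; the public node URL in the following sketch is an assumption, not something stated on this page.

# Hypothetical re-fetch of this SetScript transaction over the standard
# Waves node REST API; the node URL is an assumed public testnet endpoint.
import requests

NODE = "https://nodes-testnet.wavesnodes.com"
TX_ID = "Yxm27VmnSiXh83CHjo1DgvTCSkak33fbPnBnSF6qgCS"

tx = requests.get(f"{NODE}/transactions/info/{TX_ID}", timeout=10).json()
print(tx["type"], tx["height"], tx["sender"])   # 13, 3082584, 3N3n75Uq...
print(tx["script"][:40] + "...")                # base64 dApp script, as dumped below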

{ "type": 13, "id": "Yxm27VmnSiXh83CHjo1DgvTCSkak33fbPnBnSF6qgCS", "fee": 1000000, "feeAssetId": null, "timestamp": 1714299726158, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "2df9WqWxRfZh4TQjuvx9VvWmYxqWAqwgdPkLWskoXicuNVAgNfu5HFBhU3o2gMvVgLM3K12qpni6hGbMaKVskuNX" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAkAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwWCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcUJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAABmNsYW1wWgAAAAIAAAABegAAAAVsaW1pdAMJAABmAAAAAgUAAAABegUAAAAFbGltaXQFAAAABWxpbWl0AwkAAGYAAAACCQEAAAABLQAAAAEFAAAABWxpbWl0BQAAAAF6CQEAAAABLQAAAAEFAAAABWxpbWl0BQAAAAF6AQAAAApleHBfYXBwcm94AAAAAQAAAAF4BAAAAARiYXNlAAAAAAAAD0JABAAAAAZtYXhFeHAAAAAAAAADDUADCQAAZgAAAAIJAQAAAAEtAAAAAQUAAAAGbWF4RXhwBQAAAAF4AAAAAAAAAAAAAwkAAGYAAAACBQAAAAF4BQAAAAZtYXhFeHAJAABoAAAAAgUAAAAEYmFzZQUAAAAEYmFzZQkAAGkAAAACBQAAAARiYXNlCQAAZAAAAAIAAAAAAAAAAAEJAABpAAAAAgUAAAABeAAAAAAAAAAnEAEAAAAHc2lnbW9pZAAAAAIAAAABegAAAAtkZWJ1Z1ByZWZpeAQAAAAIY2xhbXBlZFoJAQAAAAZjbGFtcFoAAAACBQAAAAF6AAAAAAAAAYagBAAAAAlwb3NpdGl2ZVoDCQAAZgAAAAIAAAAAAAAAAAAFAAAAAXoJAQAAAAEtAAAAAQUAAAABegUAAAABegQAAAAIZXhwVmFsdWUJAQAAAApleHBfYXBwcm94AAAAAQkBAAAAAS0AAAABBQAAAAlwb3NpdGl2ZVoEAAAACHNpZ1ZhbHVlCQAAaQAAAAIAAAAAAAAPQkAJAABkAAAAAgAAAAAAAA9CQAUAAAAIZXhwVmFsdWUJAAUUAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAIY2xhbXBlZFoFAAAACGNsYW1wZWRaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAlwb3NpdGl2ZVoFAAAACXBvc2l0aXZlWgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAIZXhwVmFsdWUFAAAACGV4cFZhbHVlCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEJAABoAAAAAgkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAAAAAAAAABhqAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABCQAAaAAAAAIJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAABAAAAAAAAAYagBAAAAAskdDAxNTM4MTU5MQkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjFOMAQAAAANZGVidWdFbnRyaWVzMAgFAAAACyR0MDE1MzgxNTkxAAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTUzODE1OTEAAAACXzIEAAAACyR0MDE1OTYxNjQ5CQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTECAAAACExheWVyMU4xBAAAAA1kZWJ1Z0VudHJpZXMxCAUAAAALJHQwMTU5NjE2NDkAAAACXzEEAAAABHNpZzEIBQAAAAskdDAxNTk2MTY0OQAAAAJfMgQAAAAJZGVidWdJbmZvCQAETgAAAAIFAAAADWRlYnVnRW50cmllczAFAAAADWRlYnVnRW50cmllczEEAAAABm91dHB1dAkABEwAAAACBQAAAARzaWcwCQAETAAAAAIFAAAABHNpZzEFAAAAA25pbAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAA
Hd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQkAAGgAAAACCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAAAAAAAAAGGoAQAAAALJHQwMTk1OTIwMTIJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIyTjAEAAAADWRlYnVnRW50cmllczAIBQAAAAskdDAxOTU5MjAxMgAAAAJfMQQAAAAEc2lnMAgFAAAACyR0MDE5NTkyMDEyAAAAAl8yBAAAAAlkZWJ1Z0luZm8FAAAADWRlYnVnRW50cmllczAEAAAABm91dHB1dAUAAAAEc2lnMAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAZpbnB1dHMJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQxCQAETAAAAAIFAAAADHNjYWxlZElucHV0MgUAAAADbmlsBAAAAAskdDAyMzI0MjQyMgkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAABkxheWVyMQQAAAALZGVidWdMYXllcjEIBQAAAAskdDAyMzI0MjQyMgAAAAJfMQQAAAAMbGF5ZXIxT3V0cHV0CAUAAAALJHQwMjMyNDI0MjIAAAACXzIEAAAACyR0MDI0MjcyNTMxCQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwIAAAAGTGF5ZXIyBAAAAAtkZWJ1Z0xheWVyMggFAAAACyR0MDI0MjcyNTMxAAAAAl8xBAAAAAxsYXllcjJPdXRwdXQIBQAAAAskdDAyNDI3MjUzMQAAAAJfMgkABE4AAAACCQAETgAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAUAAAAMbGF5ZXIyT3V0cHV0BQAAAANuaWwFAAAAC2RlYnVnTGF5ZXIxBQAAAAtkZWJ1Z0xheWVyMgAAAADGfPsH", "height": 3082584, "applicationStatus": "succeeded", "spentComplexity": 0 }

Prev: 6zJ8QRcPZvRhEXXc7Bhy38mZu4RMyVgBXbvcTVw8gMMR
Next: EUk6SP8KWsjHF1LQH8TKBsex2FAUFa7uDWqcmJfKSMsf

Diff:
(- removed, + added; unchanged code between hunks is elided as "...")

 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600732], [414197, 414253]]
+let layer1Weights = [[600497, 600733], [414197, 414253]]

-let layer1Biases = [-259051, -635638]
+let layer1Biases = [-259050, -635637]

-let layer2Weights = [[832966, -897142]]
+let layer2Weights = [[832965, -897142]]

 let layer2Biases = [-381179]

...

 func exp_approx (x) = {
     let base = 1000000
-    let scaledX = (x / 10000)
-    if ((0 > x))
-        then (base / (1 + (base * -(scaledX))))
-        else (1 + (base * scaledX))
+    let maxExp = 200000
+    if ((-(maxExp) > x))
+        then 0
+        else if ((x > maxExp))
+            then (base * base)
+            else (base / (1 + (x / 10000)))
 }


...
 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
     let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t015261579 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t015261579._1
-    let sig0 = $t015261579._2
-    let $t015841637 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t015841637._1
-    let sig1 = $t015841637._2
+    let $t015381591 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t015381591._1
+    let sig0 = $t015381591._2
+    let $t015961649 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t015961649._1
+    let sig1 = $t015961649._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
...

 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t019472000 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t019472000._1
-    let sig0 = $t019472000._2
+    let $t019592012 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t019592012._1
+    let sig0 = $t019592012._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
...
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t023122410 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t023122410._1
-    let layer1Output = $t023122410._2
-    let $t024152519 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t024152519._1
-    let layer2Output = $t024152519._2
+    let $t023242422 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t023242422._1
+    let layer1Output = $t023242422._2
+    let $t024272531 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t024272531._1
+    let layer2Output = $t024272531._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
 }
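
The functional change in this update is exp_approx: the previous linear form is replaced by a clamped reciprocal approximation, and four of the network constants (one layer-1 weight, both layer-1 biases, one layer-2 weight) move by one in their last digit. Inside the ±200000 clamp the new function evaluates, in integer arithmetic at the script's 1000000 fixed-point scale, to base / (1 + x / 10000): for example exp_approx(0) = 1000000 / 1 = 1000000 and exp_approx(50000) = 1000000 / 6 = 166666. Outside the clamp it saturates to 0 (x < -200000) or to base * base = 10^12 (x > 200000).
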
Full:
(complete decompiled script; - removed, + added)

 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600732], [414197, 414253]]
+let layer1Weights = [[600497, 600733], [414197, 414253]]

-let layer1Biases = [-259051, -635638]
+let layer1Biases = [-259050, -635637]

-let layer2Weights = [[832966, -897142]]
+let layer2Weights = [[832965, -897142]]

 let layer2Biases = [-381179]

 func clampZ (z,limit) = if ((z > limit))
     then limit
     else if ((-(limit) > z))
         then -(limit)
         else z


 func exp_approx (x) = {
     let base = 1000000
-    let scaledX = (x / 10000)
-    if ((0 > x))
-        then (base / (1 + (base * -(scaledX))))
-        else (1 + (base * scaledX))
+    let maxExp = 200000
+    if ((-(maxExp) > x))
+        then 0
+        else if ((x > maxExp))
+            then (base * base)
+            else (base / (1 + (x / 10000)))
 }


 func sigmoid (z,debugPrefix) = {
     let clampedZ = clampZ(z, 100000)
     let positiveZ = if ((0 > z))
         then -(z)
         else z
     let expValue = exp_approx(-(positiveZ))
     let sigValue = (1000000 / (1000000 + expValue))
     $Tuple2([IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
 }


 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
     let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t015261579 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t015261579._1
-    let sig0 = $t015261579._2
-    let $t015841637 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t015841637._1
-    let sig1 = $t015841637._2
+    let $t015381591 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t015381591._1
+    let sig0 = $t015381591._2
+    let $t015961649 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t015961649._1
+    let sig1 = $t015961649._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
 }


 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t019472000 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t019472000._1
-    let sig0 = $t019472000._2
+    let $t019592012 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t019592012._1
+    let sig0 = $t019592012._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
 }


 @Callable(i)
 func predict (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t023122410 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t023122410._1
-    let layer1Output = $t023122410._2
-    let $t024152519 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t024152519._1
-    let layer2Output = $t024152519._2
+    let $t023242422 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t023242422._1
+    let layer1Output = $t023242422._2
+    let $t024272531 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t024272531._1
+    let layer2Output = $t024272531._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
 }
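
The script as a whole is a small 2-2-1 feed-forward network evaluated in 1000000-scale integer arithmetic, with every sigmoid call also emitting its intermediates as IntegerEntry debug keys (e.g. "Layer1N0sigValue"). Below is a minimal off-chain mirror of predict for comparing against those entries after an invocation; a sketch only, written in Python for illustration and assuming Ride's integer division matches Python's // for the values involved.

# Off-chain mirror of the decompiled predict() above, in plain integer
# arithmetic. clampZ is omitted because the script only uses its result
# in a debug entry; division rounding for negative operands is assumed
# to match Ride and should be cross-checked against the on-chain values.
LAYER1_WEIGHTS = [[600497, 600733], [414197, 414253]]
LAYER1_BIASES  = [-259050, -635637]
LAYER2_WEIGHTS = [[832965, -897142]]
LAYER2_BIASES  = [-381179]

def exp_approx(x: int) -> int:
    base, max_exp = 1_000_000, 200_000
    if -max_exp > x:
        return 0                       # saturate for very negative arguments
    if x > max_exp:
        return base * base             # saturate for very positive arguments
    return base // (1 + x // 10_000)   # reciprocal approximation inside the clamp

def sigmoid(z: int) -> int:
    positive_z = -z if z < 0 else z
    return 1_000_000 // (1_000_000 + exp_approx(-positive_z))

def predict(input1: int, input2: int) -> int:
    inputs = [1_000_000 if input1 == 1 else 0,
              1_000_000 if input2 == 1 else 0]
    hidden = [sigmoid(inputs[0] * LAYER1_WEIGHTS[n][0]
                      + inputs[1] * LAYER1_WEIGHTS[n][1]
                      + LAYER1_BIASES[n] * 100_000)
              for n in range(2)]
    return sigmoid(hidden[0] * LAYER2_WEIGHTS[0][0]
                   + hidden[1] * LAYER2_WEIGHTS[0][1]
                   + LAYER2_BIASES[0] * 100_000)

for a in (0, 1):
    for b in (0, 1):
        print(a, b, predict(a, b))     # compare with the dApp's "result" entry

After an invocation, the entries the dApp writes (the "result" key plus the Layer1N0/Layer1N1/Layer2N0 debug keys) can be read back from the account's data storage, for example via a node's GET /addresses/data/{address} endpoint.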
