tx · G1kcT9nY3LG3KisZWKpcB8Yt76ixtTVHpy1AaMFAazRY

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 14:33 [3082661] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "G1kcT9nY3LG3KisZWKpcB8Yt76ixtTVHpy1AaMFAazRY", "fee": 1000000, "feeAssetId": null, "timestamp": 1714303957362, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "2c917ZXhRC86GFiK7ENU997FciEzhQfcs86WeqVmg1ZambU7uQHV3VeifeqZsa5W1P9MgjkMo5DjQg5DTBksBbCz" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAgAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwWCQAETAAAAAIA///////2TQoFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcYJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAACWV4cEFwcHJveAAAAAEAAAABeAQAAAAHc2NhbGVkWAkAAGsAAAADBQAAAAF4AAAAAAAAAAABAAAAAAAAD0JABAAAAAhzY2FsZWRYMgkAAGsAAAADBQAAAAdzY2FsZWRYBQAAAAdzY2FsZWRYAAAAAAAAD0JABAAAAAV0ZXJtMQkAAGUAAAACAAAAAAAAD0JABQAAAAdzY2FsZWRYBAAAAAV0ZXJtMgkAAGsAAAADBQAAAAhzY2FsZWRYMgAAAAAAAAehIAAAAAAAAAAAAQkAAGQAAAACBQAAAAV0ZXJtMQUAAAAFdGVybTIBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAB2V4cE5lZ1oJAQAAAAlleHBBcHByb3gAAAABCQEAAAABLQAAAAEFAAAAAXoEAAAADm9uZVBsdXNFeHBOZWdaCQAAZAAAAAIAAAAAAAAPQkAFAAAAB2V4cE5lZ1oEAAAACHNpZ1ZhbHVlCQAAawAAAAMAAAAA6NSlEAAFAAAADm9uZVBsdXNFeHBOZWdaAAAAAAAAAAABCQAFFAAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAABmlucHV0WgUAAAABegkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAHZXhwTmVnWgUAAAAHZXhwTmVnWgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAOb25lUGx1c0V4cE5lZ1oFAAAADm9uZVBsdXNFeHBOZWdaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEJAABoAAAAAgkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAAAAAAAAAPQkAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABCQAAaAAAAAIJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAABAAAAAAAAD0JABAAAAAskdDAxMzI5MTM4MgkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjFOMAQAAAANZGVidWdFbnRyaWVzMAgFAAAACyR0MDEzMjkxMzgyAAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTMyOTEzODIAAAACXzIEAAAACyR0MDEzODcxNDQwCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTECAAAACExheWVyMU4xBAAAAA1kZWJ1Z0VudHJpZXMxCAUAAAALJHQwMTM4NzE0NDAAAAACXzEEAAAABHNpZzEIBQAAAAskdDAxMzg3MTQ0MAAAAAJfMgQAAAAJZGVidWdJbmZvCQAETgAAAAIFAAAADWRlYnVnRW50cmllczAFAAAADWRlYnVnRW50cmllczEEAAAABm91dHB1dAkABEwAAAACBQAAAARzaWcwCQAETAAAAAIFAAAABHNpZzEFAAAAA25pbAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQkAAGgAAAACCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAAAAAAAAA9CQAQAAAALJHQwMTc1MTE4MDQJAQAAAAdzaW
dtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIyTjAEAAAADWRlYnVnRW50cmllczAIBQAAAAskdDAxNzUxMTgwNAAAAAJfMQQAAAAEc2lnMAgFAAAACyR0MDE3NTExODA0AAAAAl8yBAAAAAlkZWJ1Z0luZm8FAAAADWRlYnVnRW50cmllczAEAAAABm91dHB1dAUAAAAEc2lnMAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAZpbnB1dHMJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQxCQAETAAAAAIFAAAADHNjYWxlZElucHV0MgUAAAADbmlsBAAAAAskdDAyMTE2MjIxNAkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAABkxheWVyMQQAAAALZGVidWdMYXllcjEIBQAAAAskdDAyMTE2MjIxNAAAAAJfMQQAAAAMbGF5ZXIxT3V0cHV0CAUAAAALJHQwMjExNjIyMTQAAAACXzIEAAAACyR0MDIyMTkyMzIzCQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwIAAAAGTGF5ZXIyBAAAAAtkZWJ1Z0xheWVyMggFAAAACyR0MDIyMTkyMzIzAAAAAl8xBAAAAAxsYXllcjJPdXRwdXQIBQAAAAskdDAyMjE5MjMyMwAAAAJfMgkABE4AAAACCQAETgAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAUAAAAMbGF5ZXIyT3V0cHV0BQAAAANuaWwFAAAAC2RlYnVnTGF5ZXIxBQAAAAtkZWJ1Z0xheWVyMgAAAAATV2Q8", "height": 3082661, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: CdzzpSdtWkMdq88oLky7G7s1vveHjZQsJ1rC8WaAeC1X Next: BM99xHVxdCc9EqbXsCkM7ZWSZhTJvHtirzap69CgHG6L Diff:
@@ -1,9 +1,9 @@
 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600732], [414197, 414253]]
+let layer1Weights = [[600497, 600733], [414197, 414253]]
 
-let layer1Biases = [-259051, -635637]
+let layer1Biases = [-259050, -635638]
 
 let layer2Weights = [[832966, -897142]]
 
@@ -27,14 +27,14 @@
 
 
 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
-    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t013271380 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t013271380._1
-    let sig0 = $t013271380._2
-    let $t013851438 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t013851438._1
-    let sig1 = $t013851438._2
+    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 1000000))
+    let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 1000000))
+    let $t013291382 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t013291382._1
+    let sig0 = $t013291382._2
+    let $t013871440 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t013871440._1
+    let sig1 = $t013871440._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
@@ -42,10 +42,10 @@
 
 
 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
-    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t017481801 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t017481801._1
-    let sig0 = $t017481801._2
+    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 1000000))
+    let $t017511804 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t017511804._1
+    let sig0 = $t017511804._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
@@ -61,12 +61,12 @@
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t021132211 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t021132211._1
-    let layer1Output = $t021132211._2
-    let $t022162320 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t022162320._1
-    let layer2Output = $t022162320._2
+    let $t021162214 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t021162214._1
+    let layer1Output = $t021162214._2
+    let $t022192323 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t022192323._1
+    let layer2Output = $t022192323._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
 }
 
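The functional change in this update is the bias scaling in both forward-pass functions: biases[n] * 100000 becomes biases[n] * 1000000, consistent with the 6-decimal fixed-point scale the script uses for inputs and weights, and one layer-1 weight plus both layer-1 biases shift by a single unit. A small off-chain check of the layer-1 neuron-0 pre-activation for input (1, 1), using only the constants from the diff (Python; variable names are mine):

inputs = [1_000_000, 1_000_000]            # predict(1, 1) after input scaling

w_old, b_old = [600497, 600732], -259051   # neuron 0, previous script
w_new, b_new = [600497, 600733], -259050   # neuron 0, this script

sum_old = inputs[0] * w_old[0] + inputs[1] * w_old[1] + b_old * 100_000
sum_new = inputs[0] * w_new[0] + inputs[1] * w_new[1] + b_new * 1_000_000

print(sum_old)   # 1_175_323_900_000 -> bias term is roughly 2% of the weighted sum
print(sum_new)   # 942_180_000_000   -> bias term is roughly 22% of the weighted sum
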
Full:
 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600732], [414197, 414253]]
+let layer1Weights = [[600497, 600733], [414197, 414253]]
 
-let layer1Biases = [-259051, -635637]
+let layer1Biases = [-259050, -635638]
 
 let layer2Weights = [[832966, -897142]]
 
 let layer2Biases = [-381179]
 
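Read as 6-decimal fixed-point values (the convention the rest of the script appears to use for inputs and biases), the current constants decode roughly as follows; this is an off-chain convenience only, and the names are mine:

SCALE = 1_000_000   # assumed fixed-point scale, inferred from the script

layer1_weights = [[600497, 600733], [414197, 414253]]
layer1_biases = [-259050, -635638]

print([[w / SCALE for w in row] for row in layer1_weights])  # -> [[0.600497, 0.600733], [0.414197, 0.414253]]
print([b / SCALE for b in layer1_biases])                    # -> [-0.25905, -0.635638]
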
 func expApprox (x) = {
     let scaledX = fraction(x, 1, 1000000)
     let scaledX2 = fraction(scaledX, scaledX, 1000000)
     let term1 = (1000000 - scaledX)
     let term2 = fraction(scaledX2, 500000, 1)
     (term1 + term2)
 }
 
 
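expApprox is a second-order polynomial in scaledX = x / 10^6 (Ride's fraction(a, b, c) computes a * b / c). Note that term2 = scaledX2 * 500000 is not divided back down by 10^6, so the quadratic term ends up on a much larger scale than term1; the sketch below transcribes the on-chain arithmetic as-is rather than correcting it, and the function name is mine:

def exp_approx(x: int) -> int:
    # Mirrors the decompiled expApprox. Python's // floors toward negative
    # infinity, while Ride's fraction rounding may differ for negative
    # intermediates, so treat results near zero as approximate.
    scaled_x = x * 1 // 1_000_000
    scaled_x2 = scaled_x * scaled_x // 1_000_000
    term1 = 1_000_000 - scaled_x
    term2 = scaled_x2 * 500_000 // 1
    return term1 + term2

print(exp_approx(-942_180_000_000))   # a pre-activation of the size the contract produces
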
 func sigmoid (z,debugPrefix) = {
     let expNegZ = expApprox(-(z))
     let onePlusExpNegZ = (1000000 + expNegZ)
     let sigValue = fraction(1000000000000, onePlusExpNegZ, 1)
     $Tuple2([IntegerEntry((debugPrefix + "inputZ"), z), IntegerEntry((debugPrefix + "expNegZ"), expNegZ), IntegerEntry((debugPrefix + "onePlusExpNegZ"), onePlusExpNegZ), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
 }
 
 
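sigmoid combines expApprox with a final fraction call. As decompiled, fraction(1000000000000, onePlusExpNegZ, 1) evaluates to 10^12 * onePlusExpNegZ (fraction multiplies its first two arguments and divides by the third), so sigValue grows as exp(-z) grows; a textbook logistic in this fixed-point scheme would instead divide 10^12 by onePlusExpNegZ. The sketch below reproduces the on-chain arithmetic as-is, omits the IntegerEntry debug records, and assumes exp_approx from the previous sketch:

def sigmoid_value(z: int) -> int:
    # Numeric result of the decompiled sigmoid (debug entries omitted).
    exp_neg_z = exp_approx(-z)
    one_plus_exp_neg_z = 1_000_000 + exp_neg_z
    # fraction(1000000000000, onePlusExpNegZ, 1) == 10**12 * onePlusExpNegZ // 1
    return 1_000_000_000_000 * one_plus_exp_neg_z // 1
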
 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
-    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t013271380 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t013271380._1
-    let sig0 = $t013271380._2
-    let $t013851438 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t013851438._1
-    let sig1 = $t013851438._2
+    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 1000000))
+    let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 1000000))
+    let $t013291382 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t013291382._1
+    let sig0 = $t013291382._2
+    let $t013871440 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t013871440._1
+    let sig1 = $t013871440._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
 }
 
 
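The layer-1 pass above is two dot products plus a rescaled bias, each pushed through sigmoid. An equivalent off-chain sketch, assuming sigmoid_value from the previous sketch (constant and function names are mine):

LAYER1_WEIGHTS = [[600497, 600733], [414197, 414253]]   # current on-chain values
LAYER1_BIASES = [-259050, -635638]

def forward_pass_layer1(inputs: list[int]) -> list[int]:
    # inputs are 6-decimal fixed-point integers, e.g. [1_000_000, 0] for (1, 0)
    sums = [
        inputs[0] * LAYER1_WEIGHTS[n][0]
        + inputs[1] * LAYER1_WEIGHTS[n][1]
        + LAYER1_BIASES[n] * 1_000_000
        for n in range(2)
    ]
    return [sigmoid_value(s) for s in sums]
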
 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
-    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t017481801 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t017481801._1
-    let sig0 = $t017481801._2
+    let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 1000000))
+    let $t017511804 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t017511804._1
+    let sig0 = $t017511804._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
 }
 
 
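forwardPassLayer2 is the same pattern reduced to a single output neuron; the sketch again mirrors the decompiled integer arithmetic and assumes sigmoid_value from above:

LAYER2_WEIGHTS = [[832966, -897142]]
LAYER2_BIASES = [-381179]

def forward_pass_layer2(inputs: list[int]) -> int:
    s = (inputs[0] * LAYER2_WEIGHTS[0][0]
         + inputs[1] * LAYER2_WEIGHTS[0][1]
         + LAYER2_BIASES[0] * 1_000_000)
    return sigmoid_value(s)
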
 @Callable(i)
 func predict (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t021132211 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t021132211._1
-    let layer1Output = $t021132211._2
-    let $t022162320 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t022162320._1
-    let layer2Output = $t022162320._2
+    let $t021162214 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t021162214._1
+    let layer1Output = $t021162214._2
+    let $t022192323 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t022192323._1
+    let layer2Output = $t022192323._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
 }
 
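Putting the pieces together: the callable predict(input1, input2) maps its two integer arguments to 0 or 1000000, runs both layers, and stores the final activation under the data key "result" together with the per-neuron debug entries written by sigmoid. On-chain this is triggered by an InvokeScript transaction against 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY calling predict; an end-to-end off-chain reproduction using the sketches above:

def predict(input1: int, input2: int) -> int:
    scaled = [1_000_000 if v == 1 else 0 for v in (input1, input2)]
    layer1_out = forward_pass_layer1(scaled)
    return forward_pass_layer2(layer1_out)   # value stored as IntegerEntry("result", ...)

if __name__ == "__main__":
    for a in (0, 1):
        for b in (0, 1):
            print((a, b), predict(a, b))

With the decoded weights, the 2-2-1 layout looks consistent with a small XOR-style network, though given the scaling quirks noted above the stored values are large integers rather than activations bounded by 10^6.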
