tx · 2M6HVceMCCSxMt5DCypi4Uye6KUcaMYd3fZxBfo1itFh

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.03.20 12:15 [3026153] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "2M6HVceMCCSxMt5DCypi4Uye6KUcaMYd3fZxBfo1itFh", "fee": 1000000, "feeAssetId": null, "timestamp": 1710926227447, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "3Vadz6GAuEBs7M8Fw78k8efUnuiHaDFpM59ZhFvuEor8BggppepSsFeFZJ92o7GRZzHbrH1eSLkjHjnixLU7E9bg" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAgAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAASAnZCQAETAAAAAIA//////+zrIUFAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAABfA54JAARMAAAAAgD//////58MgwUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP//////2YbeCQAETAAAAAIAAAAAAAAzuDoFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAB7v3AJAARMAAAAAgD//////4ylsAUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAAAAAAAANUR+BQAAAANuaWwBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdleHBQYXJ0CQAAawAAAAMFAAAAAWUFAAAABGJhc2UFAAAACXBvc2l0aXZlWgQAAAAIc2lnVmFsdWUJAABrAAAAAwUAAAAEYmFzZQUAAAAEYmFzZQkAAGQAAAACBQAAAARiYXNlBQAAAAdleHBQYXJ0CQAFFAAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACXBvc2l0aXZlWgUAAAAJcG9zaXRpdmVaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAdleHBQYXJ0BQAAAAdleHBQYXJ0CQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAACmRvdFByb2R1Y3QAAAACAAAAAWEAAAABYgQAAAAIcHJvZHVjdDAJAABrAAAAAwkAAZEAAAACBQAAAAFhAAAAAAAAAAAACQABkQAAAAIFAAAAAWIAAAAAAAAAAAAAAAAAAAAPQkAEAAAACHByb2R1Y3QxCQAAawAAAAMJAAGRAAAAAgUAAAABYQAAAAAAAAAAAQkAAZEAAAACBQAAAAFiAAAAAAAAAAABAAAAAAAAD0JACQAAZAAAAAIFAAAACHByb2R1Y3QwBQAAAAhwcm9kdWN0MQEAAAALZm9yd2FyZFBhc3MAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAABWxheWVyBAAAAARzdW0wCQAAZAAAAAIJAQAAAApkb3RQcm9kdWN0AAAAAgUAAAAFaW5wdXQJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAEAAAABHN1bTEJAABkAAAAAgkBAAAACmRvdFByb2R1Y3QAAAACBQAAAAVpbnB1dAkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAQQAAAALJHQwMTMzMTEzODgJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAkAASwAAAACBQAAAAVsYXllcgIAAAAETDFOMQQAAAANc2lnbW9pZERlYnVnMAgFAAAACyR0MDEzMzExMzg4AAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTMzMTEzODgAAAACXzIEAAAACyR0MDEzOTMxNDUwCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTEJAAEsAAAAAgUAAAAFbGF5ZXICAAAABEwxTjIEAAAADXNpZ21vaWREZWJ1ZzEIBQAAAAskdDAxMzkzMTQ1MAAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDEzOTMxNDUwAAAAAl8yCQAFFAAAAAIJAARMAAAAAgUAAAAEc2lnMAkABEwAAAACBQAAAARzaWcxCQAETAAAAAIFAAAABHN1bTAJAARMAAAAAgUAAAAEc3VtMQUAAAADbmlsCQAETgAAAAIFAAAADXNpZ21vaWREZWJ1ZzAFAAAADXNpZ21vaWREZWJ1ZzEBAAAAEHhvck5ldXJhbE5ldHdvcmsAAAACAAAABmlucHV0MQAAAAZpbnB1dDIEAAAABWlucHV0CQAETAAAAAIFAAAABmlucHV0MQkABEwAAAACBQAAAAZpbnB1dDIFAAAAA25pbAQAAAALJHQwMTYyODE3MjAJAQAAAAtmb3J3YXJkUGFzcwAAAAQFAAAABWlucHV0BQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAAAkhMBAAAABFoaWRkZW5MYXllck91dHB1dAgFAAAACyR0MDE2MjgxNzIwAAAAAl8xBAAAAAtoaWRkZW5EZWJ1ZwgFAAAACyR0MDE2MjgxNzIwAAAAAl8yBAAAAAskdDAxNzI1MTg2MAkBAAAAB3NpZ21vaWQAAAACCQAAZAAAAAIJAQAAAApkb3RQcm9kdWN0AAAAAgkABEwAAAACCQABkQAAAAIFAAAAEWhpZGRlbkxheWVyT3V0cHV0AAAAAAAAAAAACQAETAAAAAIJAAGRAAAAAgUAAAARaGlkZGVuTGF5ZXJPdXRwdXQAAAAAAAAAAAEFAAAAA25pbAkAAZEAAAACBQAAAA1sYXllcjJXZWlnaHRzAAAAAAAAAAAACQABkQAAAAIFAAAADGxheWVyMkJpYXNlcwAAAAAAAAAAAAIAAAACT0wEAAAAC291dHB1dERlYnVnCAUAAAALJHQwMTcyNTE4NjAAAAACXzEEAAAABm91dHB1dAgFAAAACyR0MDE3MjUxODYwAAAAAl8yCQAFFAAAAAIJAARMAAAAAgUAAAAGb3V0cHV0CQAETAAAAAIJAABkAAAAAg
kBAAAACmRvdFByb2R1Y3QAAAACCQAETAAAAAIJAAGRAAAAAgUAAAARaGlkZGVuTGF5ZXJPdXRwdXQAAAAAAAAAAAAJAARMAAAAAgkAAZEAAAACBQAAABFoaWRkZW5MYXllck91dHB1dAAAAAAAAAAAAQUAAAADbmlsCQABkQAAAAIFAAAADWxheWVyMldlaWdodHMAAAAAAAAAAAAJAAGRAAAAAgUAAAAMbGF5ZXIyQmlhc2VzAAAAAAAAAAAACQAETAAAAAIJAAGRAAAAAgUAAAARaGlkZGVuTGF5ZXJPdXRwdXQAAAAAAAAAAAIJAARMAAAAAgkAAZEAAAACBQAAABFoaWRkZW5MYXllck91dHB1dAAAAAAAAAAAAwUAAAADbmlsCQAETgAAAAIFAAAAC2hpZGRlbkRlYnVnBQAAAAtvdXRwdXREZWJ1ZwAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAskdDAyMjQ1MjMyNgkBAAAAEHhvck5ldXJhbE5ldHdvcmsAAAACBQAAAAxzY2FsZWRJbnB1dDEFAAAADHNjYWxlZElucHV0MgQAAAAObmV0d29ya091dHB1dHMIBQAAAAskdDAyMjQ1MjMyNgAAAAJfMQQAAAAMZGVidWdFbnRyaWVzCAUAAAALJHQwMjI0NTIzMjYAAAACXzIJAAROAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgIAAAAGcmVzdWx0CQABkQAAAAIFAAAADm5ldHdvcmtPdXRwdXRzAAAAAAAAAAAACQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAAA5vdXRwdXRMYXllclN1bQkAAZEAAAACBQAAAA5uZXR3b3JrT3V0cHV0cwAAAAAAAAAAAQkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgIAAAAVaGlkZGVuTGF5ZXJPdXRwdXQxU3VtCQABkQAAAAIFAAAADm5ldHdvcmtPdXRwdXRzAAAAAAAAAAACCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAABVoaWRkZW5MYXllck91dHB1dDJTdW0JAAGRAAAAAgUAAAAObmV0d29ya091dHB1dHMAAAAAAAAAAAMFAAAAA25pbAUAAAAMZGVidWdFbnRyaWVzAAAAAGJHpjM=", "height": 3026153, "applicationStatus": "succeeded", "spentComplexity": 0 }
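The script field above is the compiled dApp, base64-encoded inside this type 13 (SetScript) transaction; the fee of 1000000 is in wavelets, i.e. the 0.01 Waves shown at the top. As a minimal sketch (Python assumed, string truncated), the script can be decoded locally; the explorer's decompiled view of the same bytecode follows.

import base64

# First characters of the "script" value above; paste the full base64 string
# to decode the entire compiled dApp.
script_b64 = "AAIF"

compiled = base64.b64decode(script_b64)
print(len(compiled), "bytes decoded:", compiled.hex())
# -> "3 bytes decoded: 000205", consistent with the CONTENT_TYPE DAPP /
# STDLIB_VERSION 5 header of the decompiled script below (reading the header
# bytes this way is an assumption, not something stated on this page).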
Diff:

@@ -12,10 +12,12 @@
 func sigmoid (z,debugPrefix) = {
     let e = 2718281
     let base = 1000000
-    let negativeZ = (-1 * z)
-    let expPart = fraction(e, negativeZ, base)
+    let positiveZ = if ((0 > z))
+        then -(z)
+        else z
+    let expPart = fraction(e, base, positiveZ)
     let sigValue = fraction(base, base, (base + expPart))
-    $Tuple2([IntegerEntry((debugPrefix + "negativeZ"), negativeZ), IntegerEntry((debugPrefix + "expPart"), expPart)], sigValue)
+    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
 }


@@ -29,24 +31,24 @@
 func forwardPass (input,weights,biases,layer) = {
     let sum0 = (dotProduct(input, weights[0]) + biases[0])
     let sum1 = (dotProduct(input, weights[1]) + biases[1])
-    let $t010051062 = sigmoid(sum0, (layer + "L1N1"))
-    let sigmoidDebug0 = $t010051062._1
-    let sig0 = $t010051062._2
-    let $t010671124 = sigmoid(sum1, (layer + "L1N2"))
-    let sigmoidDebug1 = $t010671124._1
-    let sig1 = $t010671124._2
+    let $t013311388 = sigmoid(sum0, (layer + "L1N1"))
+    let sigmoidDebug0 = $t013311388._1
+    let sig0 = $t013311388._2
+    let $t013931450 = sigmoid(sum1, (layer + "L1N2"))
+    let sigmoidDebug1 = $t013931450._1
+    let sig1 = $t013931450._2
     $Tuple2([sig0, sig1, sum0, sum1], (sigmoidDebug0 ++ sigmoidDebug1))
 }


 func xorNeuralNetwork (input1,input2) = {
     let input = [input1, input2]
-    let $t013021394 = forwardPass(input, layer1Weights, layer1Biases, "HL")
-    let hiddenLayerOutput = $t013021394._1
-    let hiddenDebug = $t013021394._2
-    let $t013991534 = sigmoid((dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), "OL")
-    let outputDebug = $t013991534._1
-    let output = $t013991534._2
+    let $t016281720 = forwardPass(input, layer1Weights, layer1Biases, "HL")
+    let hiddenLayerOutput = $t016281720._1
+    let hiddenDebug = $t016281720._2
+    let $t017251860 = sigmoid((dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), "OL")
+    let outputDebug = $t017251860._1
+    let output = $t017251860._2
     $Tuple2([output, (dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), hiddenLayerOutput[2], hiddenLayerOutput[3]], (hiddenDebug ++ outputDebug))
 }

@@ -59,9 +61,9 @@
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
-    let $t019192000 = xorNeuralNetwork(scaledInput1, scaledInput2)
-    let networkOutputs = $t019192000._1
-    let debugEntries = $t019192000._2
+    let $t022452326 = xorNeuralNetwork(scaledInput1, scaledInput2)
+    let networkOutputs = $t022452326._1
+    let debugEntries = $t022452326._2
     ([IntegerEntry("result", networkOutputs[0]), IntegerEntry("outputLayerSum", networkOutputs[1]), IntegerEntry("hiddenLayerOutput1Sum", networkOutputs[2]), IntegerEntry("hiddenLayerOutput2Sum", networkOutputs[3])] ++ debugEntries)
 }
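In the contract's 6-decimal fixed-point convention (base = 1000000, fraction(a, b, c) computing a·b/c in integer arithmetic), the diff replaces expPart = e·(−z)/base with expPart = e·base/|z| and additionally records sigValue as a debug entry. A minimal off-chain sketch of the two variants, assuming fraction behaves like floor division (Ride's rounding of negative intermediates may differ by a unit):

# Fixed-point sigmoid variants from the diff above (sketch; not the on-chain code).
E = 2718281      # e scaled by 10^6
BASE = 1000000   # fixed-point scale

def fraction(a, b, c):
    # Stand-in for Ride's fraction(a, b, c); floor division is an assumption about rounding.
    return (a * b) // c

def sigmoid_old(z):
    negative_z = -1 * z
    exp_part = fraction(E, negative_z, BASE)        # e * (-z) / base
    return fraction(BASE, BASE, BASE + exp_part)    # base^2 / (base + expPart)

def sigmoid_new(z):
    positive_z = -z if z < 0 else z                 # |z|; z == 0 would divide by zero
    exp_part = fraction(E, BASE, positive_z)        # e * base / |z|
    return fraction(BASE, BASE, BASE + exp_part)

# layer1Biases[0] and the two hidden-layer sums these weights produce for input (1, 0)
for z in (-2521378, 2199735, 9616344):
    print(z, sigmoid_old(z), sigmoid_new(z))

Neither variant is the exponential-based logistic function; both are the contract's own integer approximations, and the new one depends only on |z|.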
Full:
 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
 let layer1Weights = [[4721113, -5002107], [6226846, -6353789]]

 let layer1Biases = [-2521378, 3389498]

 let layer2Weights = [[8109936, -7559760]]

 let layer2Biases = [3490942]

 func sigmoid (z,debugPrefix) = {
     let e = 2718281
     let base = 1000000
-    let negativeZ = (-1 * z)
-    let expPart = fraction(e, negativeZ, base)
+    let positiveZ = if ((0 > z))
+        then -(z)
+        else z
+    let expPart = fraction(e, base, positiveZ)
     let sigValue = fraction(base, base, (base + expPart))
-    $Tuple2([IntegerEntry((debugPrefix + "negativeZ"), negativeZ), IntegerEntry((debugPrefix + "expPart"), expPart)], sigValue)
+    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
 }


 func dotProduct (a,b) = {
     let product0 = fraction(a[0], b[0], 1000000)
     let product1 = fraction(a[1], b[1], 1000000)
     (product0 + product1)
 }


 func forwardPass (input,weights,biases,layer) = {
     let sum0 = (dotProduct(input, weights[0]) + biases[0])
     let sum1 = (dotProduct(input, weights[1]) + biases[1])
-    let $t010051062 = sigmoid(sum0, (layer + "L1N1"))
-    let sigmoidDebug0 = $t010051062._1
-    let sig0 = $t010051062._2
-    let $t010671124 = sigmoid(sum1, (layer + "L1N2"))
-    let sigmoidDebug1 = $t010671124._1
-    let sig1 = $t010671124._2
+    let $t013311388 = sigmoid(sum0, (layer + "L1N1"))
+    let sigmoidDebug0 = $t013311388._1
+    let sig0 = $t013311388._2
+    let $t013931450 = sigmoid(sum1, (layer + "L1N2"))
+    let sigmoidDebug1 = $t013931450._1
+    let sig1 = $t013931450._2
     $Tuple2([sig0, sig1, sum0, sum1], (sigmoidDebug0 ++ sigmoidDebug1))
 }


 func xorNeuralNetwork (input1,input2) = {
     let input = [input1, input2]
-    let $t013021394 = forwardPass(input, layer1Weights, layer1Biases, "HL")
-    let hiddenLayerOutput = $t013021394._1
-    let hiddenDebug = $t013021394._2
-    let $t013991534 = sigmoid((dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), "OL")
-    let outputDebug = $t013991534._1
-    let output = $t013991534._2
+    let $t016281720 = forwardPass(input, layer1Weights, layer1Biases, "HL")
+    let hiddenLayerOutput = $t016281720._1
+    let hiddenDebug = $t016281720._2
+    let $t017251860 = sigmoid((dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), "OL")
+    let outputDebug = $t017251860._1
+    let output = $t017251860._2
     $Tuple2([output, (dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), hiddenLayerOutput[2], hiddenLayerOutput[3]], (hiddenDebug ++ outputDebug))
 }


 @Callable(i)
 func predict (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
-    let $t019192000 = xorNeuralNetwork(scaledInput1, scaledInput2)
-    let networkOutputs = $t019192000._1
-    let debugEntries = $t019192000._2
+    let $t022452326 = xorNeuralNetwork(scaledInput1, scaledInput2)
+    let networkOutputs = $t022452326._1
+    let debugEntries = $t022452326._2
     ([IntegerEntry("result", networkOutputs[0]), IntegerEntry("outputLayerSum", networkOutputs[1]), IntegerEntry("hiddenLayerOutput1Sum", networkOutputs[2]), IntegerEntry("hiddenLayerOutput2Sum", networkOutputs[3])] ++ debugEntries)
 }
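For reference, a self-contained sketch that mirrors the decompiled predict pipeline off-chain, using the same integer weights and the same fixed-point layout (fraction again approximated with floor division, so values can drift from the on-chain result by a unit when intermediates are negative). It prints the integer that would be stored under the result key for each input pair:

# Off-chain model of the dApp above (sketch, not the contract itself).
LAYER1_WEIGHTS = [[4721113, -5002107], [6226846, -6353789]]
LAYER1_BIASES = [-2521378, 3389498]
LAYER2_WEIGHTS = [[8109936, -7559760]]
LAYER2_BIASES = [3490942]
E = 2718281
BASE = 1000000

def fraction(a, b, c):
    return (a * b) // c  # assumption: Ride's fraction rounds the quotient down

def sigmoid(z):
    positive_z = -z if z < 0 else z
    exp_part = fraction(E, BASE, positive_z)
    return fraction(BASE, BASE, BASE + exp_part)

def dot_product(a, b):
    return fraction(a[0], b[0], BASE) + fraction(a[1], b[1], BASE)

def forward_pass(inp, weights, biases):
    sum0 = dot_product(inp, weights[0]) + biases[0]
    sum1 = dot_product(inp, weights[1]) + biases[1]
    return [sigmoid(sum0), sigmoid(sum1), sum0, sum1]

def xor_network(i1, i2):
    hidden = forward_pass([i1, i2], LAYER1_WEIGHTS, LAYER1_BIASES)
    out_sum = dot_product(hidden[:2], LAYER2_WEIGHTS[0]) + LAYER2_BIASES[0]
    return [sigmoid(out_sum), out_sum, hidden[2], hidden[3]]

def predict(input1, input2):
    scaled1 = BASE if input1 == 1 else 0
    scaled2 = BASE if input2 == 1 else 0
    outputs = xor_network(scaled1, scaled2)
    return {"result": outputs[0], "outputLayerSum": outputs[1],
            "hiddenLayerOutput1Sum": outputs[2], "hiddenLayerOutput2Sum": outputs[3]}

for a, b in ((0, 0), (0, 1), (1, 0), (1, 1)):
    print((a, b), predict(a, b)["result"])

The printed values, like the on-chain IntegerEntry outputs, are fixed-point activations scaled by 10^6 rather than plain 0/1 answers.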
