tx · FW5PA4wXaJvFdUbFe4iU7VZQgY6XboWoXMhe3W5iUXmx

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.03.24 18:25 [3032312] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "FW5PA4wXaJvFdUbFe4iU7VZQgY6XboWoXMhe3W5iUXmx", "fee": 1000000, "feeAssetId": null, "timestamp": 1711293939384, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "3AoZn1s57vdLnLQZZuYpottVB4r43i5PV1sGTSEXTEhDWbWVVRaUMPbKG3BaRBGvBs9tPcosP5rGqNTE2WyYWWvu" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAgAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAASAnZCQAETAAAAAIA//////+zrIUFAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAABfA54JAARMAAAAAgD//////58MgwUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP//////2YbeCQAETAAAAAIAAAAAAAAzuDoFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAB7v3AJAARMAAAAAgD//////4ylsAUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAAAAAAAANUR+BQAAAANuaWwBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdleHBQYXJ0CQAAawAAAAMFAAAAAWUFAAAABGJhc2UFAAAACXBvc2l0aXZlWgQAAAAIc2lnVmFsdWUJAABrAAAAAwUAAAAEYmFzZQUAAAAEYmFzZQkAAGQAAAACBQAAAARiYXNlBQAAAAdleHBQYXJ0CQAFFAAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACXBvc2l0aXZlWgUAAAAJcG9zaXRpdmVaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAdleHBQYXJ0BQAAAAdleHBQYXJ0CQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAACmRvdFByb2R1Y3QAAAACAAAAAWEAAAABYgQAAAAIcHJvZHVjdDAJAABrAAAAAwkAAZEAAAACBQAAAAFhAAAAAAAAAAAACQABkQAAAAIFAAAAAWIAAAAAAAAAAAAAAAAAAAAPQkAEAAAACHByb2R1Y3QxCQAAawAAAAMJAAGRAAAAAgUAAAABYQAAAAAAAAAAAQkAAZEAAAACBQAAAAFiAAAAAAAAAAABAAAAAAAAD0JACQAAZAAAAAIFAAAACHByb2R1Y3QwBQAAAAhwcm9kdWN0MQEAAAALZm9yd2FyZFBhc3MAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAABWxheWVyBAAAAARzdW0wCQAAZAAAAAIJAQAAAApkb3RQcm9kdWN0AAAAAgUAAAAFaW5wdXQJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAEAAAABHN1bTEJAABkAAAAAgkBAAAACmRvdFByb2R1Y3QAAAACBQAAAAVpbnB1dAkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAQQAAAALJHQwMTMzMTEzODgJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAkAASwAAAACBQAAAAVsYXllcgIAAAAETDFOMQQAAAANc2lnbW9pZERlYnVnMAgFAAAACyR0MDEzMzExMzg4AAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTMzMTEzODgAAAACXzIEAAAACyR0MDEzOTMxNDUwCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTEJAAEsAAAAAgUAAAAFbGF5ZXICAAAABEwxTjIEAAAADXNpZ21vaWREZWJ1ZzEIBQAAAAskdDAxMzkzMTQ1MAAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDEzOTMxNDUwAAAAAl8yCQAFFAAAAAIJAARMAAAAAgUAAAAEc2lnMAkABEwAAAACBQAAAARzaWcxCQAETAAAAAIFAAAABHN1bTAJAARMAAAAAgUAAAAEc3VtMQUAAAADbmlsCQAETgAAAAIFAAAADXNpZ21vaWREZWJ1ZzAFAAAADXNpZ21vaWREZWJ1ZzEBAAAAEHhvck5ldXJhbE5ldHdvcmsAAAACAAAABmlucHV0MQAAAAZpbnB1dDIEAAAABWlucHV0CQAETAAAAAIFAAAABmlucHV0MQkABEwAAAACBQAAAAZpbnB1dDIFAAAAA25pbAQAAAALJHQwMTYyODE3MjAJAQAAAAtmb3J3YXJkUGFzcwAAAAQFAAAABWlucHV0BQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAAAkhMBAAAABFoaWRkZW5MYXllck91dHB1dAgFAAAACyR0MDE2MjgxNzIwAAAAAl8xBAAAAAtoaWRkZW5EZWJ1ZwgFAAAACyR0MDE2MjgxNzIwAAAAAl8yBAAAAAskdDAxNzI1MTg2MAkBAAAAB3NpZ21vaWQAAAACCQAAZAAAAAIJAQAAAApkb3RQcm9kdWN0AAAAAgkABEwAAAACCQABkQAAAAIFAAAAEWhpZGRlbkxheWVyT3V0cHV0AAAAAAAAAAAACQAETAAAAAIJAAGRAAAAAgUAAAARaGlkZGVuTGF5ZXJPdXRwdXQAAAAAAAAAAAEFAAAAA25pbAkAAZEAAAACBQAAAA1sYXllcjJXZWlnaHRzAAAAAAAAAAAACQABkQAAAAIFAAAADGxheWVyMkJpYXNlcwAAAAAAAAAAAAIAAAACT0wEAAAAC291dHB1dERlYnVnCAUAAAALJHQwMTcyNTE4NjAAAAACXzEEAAAABm91dHB1dAgFAAAACyR0MDE3MjUxODYwAAAAAl8yCQAFFAAAAAIJAARMAAAAAgUAAAAGb3V0cHV0CQAETAAAAAIJAABkAAAAAg
kBAAAACmRvdFByb2R1Y3QAAAACCQAETAAAAAIJAAGRAAAAAgUAAAARaGlkZGVuTGF5ZXJPdXRwdXQAAAAAAAAAAAAJAARMAAAAAgkAAZEAAAACBQAAABFoaWRkZW5MYXllck91dHB1dAAAAAAAAAAAAQUAAAADbmlsCQABkQAAAAIFAAAADWxheWVyMldlaWdodHMAAAAAAAAAAAAJAAGRAAAAAgUAAAAMbGF5ZXIyQmlhc2VzAAAAAAAAAAAACQAETAAAAAIJAAGRAAAAAgUAAAARaGlkZGVuTGF5ZXJPdXRwdXQAAAAAAAAAAAIJAARMAAAAAgkAAZEAAAACBQAAABFoaWRkZW5MYXllck91dHB1dAAAAAAAAAAAAwUAAAADbmlsCQAETgAAAAIFAAAAC2hpZGRlbkRlYnVnBQAAAAtvdXRwdXREZWJ1ZwAAAAEAAAABaQEAAAAQcHJlZGljdF9vcmlnaW5hbAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAskdDAyMjU0MjMzNQkBAAAAEHhvck5ldXJhbE5ldHdvcmsAAAACBQAAAAxzY2FsZWRJbnB1dDEFAAAADHNjYWxlZElucHV0MgQAAAAObmV0d29ya091dHB1dHMIBQAAAAskdDAyMjU0MjMzNQAAAAJfMQQAAAAMZGVidWdFbnRyaWVzCAUAAAALJHQwMjI1NDIzMzUAAAACXzIJAAROAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgIAAAAGcmVzdWx0CQABkQAAAAIFAAAADm5ldHdvcmtPdXRwdXRzAAAAAAAAAAAACQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAAA5vdXRwdXRMYXllclN1bQkAAZEAAAACBQAAAA5uZXR3b3JrT3V0cHV0cwAAAAAAAAAAAQkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgIAAAAVaGlkZGVuTGF5ZXJPdXRwdXQxU3VtCQABkQAAAAIFAAAADm5ldHdvcmtPdXRwdXRzAAAAAAAAAAACCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAABVoaWRkZW5MYXllck91dHB1dDJTdW0JAAGRAAAAAgUAAAAObmV0d29ya091dHB1dHMAAAAAAAAAAAMFAAAAA25pbAUAAAAMZGVidWdFbnRyaWVzAAAAANKLcGQ=", "height": 3032312, "applicationStatus": "succeeded", "spentComplexity": 0 }
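Type 13 is a SetScript transaction: it replaces the account script of the sender. The fee of 1000000 is denominated in wavelets (10^-8 Waves), i.e. the 0.01 Waves shown in the header, and chainId 84 (ASCII 'T') marks the testnet.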

Full: decompiled script, old → new (- marks removed lines, + marks added lines)
  {-# STDLIB_VERSION 5 #-}
  {-# SCRIPT_TYPE ACCOUNT #-}
  {-# CONTENT_TYPE DAPP #-}
- let layer1Weights = [[6004965, 6007324], [4141966, 4142525]]
+ let layer1Weights = [[4721113, -5002107], [6226846, -6353789]]

- let layer1Biases = [-2590503, -6356371]
+ let layer1Biases = [-2521378, 3389498]

- let layer2Weights = [[8329656, -8971418]]
+ let layer2Weights = [[8109936, -7559760]]

- let layer2Biases = [-3811788]
+ let layer2Biases = [3490942]

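The network parameters are stored as plain integers scaled by 10^6, since Ride has no floating-point type. A minimal decoding sketch in Python (the helper name to_float is ours, not part of the contract):

    SCALE = 1_000_000  # the contract's fixed-point base

    def to_float(x: int) -> float:
        """Decode a stored integer into the real-valued parameter it encodes."""
        return x / SCALE

    print(to_float(4721113))   # 4.721113  (layer1Weights[0][0])
    print(to_float(-5002107))  # -5.002107 (layer1Weights[0][1])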
  func sigmoid (z,debugPrefix) = {
      let e = 2718281
      let base = 1000000
      let positiveZ = if ((0 > z))
          then -(z)
          else z
      let expPart = fraction(e, base, positiveZ)
      let sigValue = fraction(base, base, (base + expPart))
      $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
  }
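In fixed-point terms sigmoid computes sigValue = base·base / (base + e·base/|z|), i.e. roughly 1/(1 + e/|z|) at scale 10^6: the logistic function's e^(-z) is replaced by e/|z|, so positive and negative sums map to the same value, and z = 0 would divide by zero. A sketch of the same integer arithmetic in Python, assuming Ride's fraction(a, b, c) is the quotient a·b/c with the product taken at full precision (rounding for negative operands may differ):

    BASE = 1_000_000
    E = 2_718_281  # Euler's number scaled by 10^6

    def fraction(a: int, b: int, c: int) -> int:
        # assumed semantics of Ride's fraction(a, b, c)
        return a * b // c

    def sigmoid_fixed(z: int) -> int:
        positive_z = -z if z < 0 else z               # the contract uses |z|
        exp_part = fraction(E, BASE, positive_z)      # ~ e/|z|, scaled by 10^6
        return fraction(BASE, BASE, BASE + exp_part)  # ~ 1/(1 + e/|z|)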

- func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
-     let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
-     let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + biases[1])
-     let $t011811237 = sigmoid(sum0, (debugPrefix + "L1N0"))
-     let debug0 = $t011811237._1
-     let sig0 = $t011811237._2
-     let $t012421298 = sigmoid(sum1, (debugPrefix + "L1N1"))
-     let debug1 = $t012421298._1
-     let sig1 = $t012421298._2
-     $Tuple2([sig0, sig1], (debug0 ++ debug1))
+ func dotProduct (a,b) = {
+     let product0 = fraction(a[0], b[0], 1000000)
+     let product1 = fraction(a[1], b[1], 1000000)
+     (product0 + product1)
  }
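With the fixed-point one (1000000) as an input component, dotProduct reduces to selecting weights. For example, dotProduct([1000000, 0], [4721113, -5002107]) = fraction(1000000, 4721113, 1000000) + fraction(0, -5002107, 1000000) = 4721113 + 0 = 4721113, because multiplying by the base and then dividing by it cancel exactly.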

- func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
-     let sum0 = ((fraction(input[0], weights[0], 1000000) + fraction(input[1], weights[1], 1000000)) + biases)
-     let sum1 = ((fraction(input[0], weights[0], 1000000) + fraction(input[1], weights[1], 1000000)) + biases)
-     let $t016521708 = sigmoid(sum0, (debugPrefix + "L2N0"))
-     let debug0 = $t016521708._1
-     let sig0 = $t016521708._2
-     let $t017131769 = sigmoid(sum1, (debugPrefix + "L2N1"))
-     let debug1 = $t017131769._1
-     let sig1 = $t017131769._2
-     $Tuple2(sig0, (debug0 ++ debug1))
+ func forwardPass (input,weights,biases,layer) = {
+     let sum0 = (dotProduct(input, weights[0]) + biases[0])
+     let sum1 = (dotProduct(input, weights[1]) + biases[1])
+     let $t013311388 = sigmoid(sum0, (layer + "L1N1"))
+     let sigmoidDebug0 = $t013311388._1
+     let sig0 = $t013311388._2
+     let $t013931450 = sigmoid(sum1, (layer + "L1N2"))
+     let sigmoidDebug1 = $t013931450._1
+     let sig1 = $t013931450._2
+     $Tuple2([sig0, sig1, sum0, sum1], (sigmoidDebug0 ++ sigmoidDebug1))
+ }
+
+
+ func xorNeuralNetwork (input1,input2) = {
+     let input = [input1, input2]
+     let $t016281720 = forwardPass(input, layer1Weights, layer1Biases, "HL")
+     let hiddenLayerOutput = $t016281720._1
+     let hiddenDebug = $t016281720._2
+     let $t017251860 = sigmoid((dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), "OL")
+     let outputDebug = $t017251860._1
+     let output = $t017251860._2
+     $Tuple2([output, (dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), hiddenLayerOutput[2], hiddenLayerOutput[3]], (hiddenDebug ++ outputDebug))
  }
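The $t0… identifiers (e.g. $t013311388) are decompiler-generated names for tuple destructuring; the digits appear to encode source offsets. Note that the new forwardPass returns the raw pre-activation sums alongside the activations, and xorNeuralNetwork passes them through, which is what lets predict_original persist them as debug entries below.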

  @Callable(i)
- func predict (input1,input2) = {
+ func predict_original (input1,input2) = {
      let scaledInput1 = if ((input1 == 1))
          then 1000000
          else 0
      let scaledInput2 = if ((input2 == 1))
          then 1000000
          else 0
-     let inputs = [scaledInput1, scaledInput2]
-     let $t020302128 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-     let layer1Output = $t020302128._1
-     let debugLayer1 = $t020302128._2
-     let $t021332243 = forwardPassLayer2(layer1Output, layer2Weights[0], layer2Biases[0], "Layer2")
-     let layer2Output = $t021332243._1
-     let debugLayer2 = $t021332243._2
-     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
+     let $t022542335 = xorNeuralNetwork(scaledInput1, scaledInput2)
+     let networkOutputs = $t022542335._1
+     let debugEntries = $t022542335._2
+     ([IntegerEntry("result", networkOutputs[0]), IntegerEntry("outputLayerSum", networkOutputs[1]), IntegerEntry("hiddenLayerOutput1Sum", networkOutputs[2]), IntegerEntry("hiddenLayerOutput2Sum", networkOutputs[3])] ++ debugEntries)
  }
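To sanity-check the new weights off-chain, the whole forward pass can be mirrored in a few lines of Python. A minimal reconstruction under the same fraction assumption as above (our sketch, not code shipped with the contract); it prints the scaled output for each XOR input pair, where values near 1000000 read as 1 and values near 0 read as 0:

    BASE = 1_000_000

    LAYER1_WEIGHTS = [[4721113, -5002107], [6226846, -6353789]]
    LAYER1_BIASES = [-2521378, 3389498]
    LAYER2_WEIGHTS = [[8109936, -7559760]]
    LAYER2_BIASES = [3490942]

    def fraction(a: int, b: int, c: int) -> int:
        return a * b // c  # assumed rounding; Ride may round negatives differently

    def sigmoid_fixed(z: int) -> int:
        positive_z = -z if z < 0 else z
        exp_part = fraction(2_718_281, BASE, positive_z)
        return fraction(BASE, BASE, BASE + exp_part)

    def dot_product(a: list, b: list) -> int:
        return fraction(a[0], b[0], BASE) + fraction(a[1], b[1], BASE)

    def predict(input1: int, input2: int) -> int:
        x = [input1 * BASE, input2 * BASE]  # mirrors the 0/1000000 input scaling
        hidden = [sigmoid_fixed(dot_product(x, LAYER1_WEIGHTS[i]) + LAYER1_BIASES[i])
                  for i in range(2)]
        return sigmoid_fixed(dot_product(hidden, LAYER2_WEIGHTS[0]) + LAYER2_BIASES[0])

    for a, b in [(0, 0), (0, 1), (1, 0), (1, 1)]:
        print(a, b, predict(a, b))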
