tx · 7pAj49vY3uDEe7WV34bD7BY2ayD2n8t87qYQ9jXybgWV

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.03.23 15:34 [3030701] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "7pAj49vY3uDEe7WV34bD7BY2ayD2n8t87qYQ9jXybgWV", "fee": 1000000, "feeAssetId": null, "timestamp": 1711197295665, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "4oq6XyUna6Zq2NMKU9hHLkLArGMCbX7bowBeGmUmjrt3B7TjxoLR96aZ2DwKc2woG6fJx1gm1qsWVmHTimkTRs8h" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAcAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAAW6DlCQAETAAAAAIAAAAAAABbqhwFAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAA/M44JAARMAAAAAgAAAAAAAD81vQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP//////2HjZCQAETAAAAAIA//////+fAm0FAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAB/GbgJAARMAAAAAgD//////3cbZgUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////xdY0BQAAAANuaWwBAAAAB3NpZ21vaWQAAAABAAAAAXoEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdleHBQYXJ0CQAAawAAAAMFAAAAAWUFAAAABGJhc2UFAAAACXBvc2l0aXZlWgkAAGsAAAADBQAAAARiYXNlBQAAAARiYXNlCQAAZAAAAAIFAAAABGJhc2UFAAAAB2V4cFBhcnQBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAAAwAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPQkAJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAPQkAJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAABAAAAARzdW0xCQAAZAAAAAIJAABkAAAAAgkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA9CQAkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAQAAAAAAAAAAAQAAAAAAAA9CQAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAEEAAAABHNpZzAJAQAAAAdzaWdtb2lkAAAAAQUAAAAEc3VtMAQAAAAEc2lnMQkBAAAAB3NpZ21vaWQAAAABBQAAAARzdW0xCQAETAAAAAIFAAAABHNpZzAJAARMAAAAAgUAAAAEc2lnMQUAAAADbmlsAQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAMAAAAFaW5wdXQAAAAHd2VpZ2h0cwAAAARiaWFzBAAAAApkb3RQcm9kdWN0CQAAZAAAAAIJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAA9CQAQAAAADc3VtCQAAZAAAAAIFAAAACmRvdFByb2R1Y3QFAAAABGJpYXMJAQAAAAdzaWdtb2lkAAAAAQUAAAADc3VtAAAAAQAAAAFpAQAAAAdwcmVkaWN0AAAAAgAAAAZpbnB1dDEAAAAGaW5wdXQyBAAAAAxzY2FsZWRJbnB1dDEDCQAAAAAAAAIFAAAABmlucHV0MQAAAAAAAAAAAQAAAAAAAA9CQAAAAAAAAAAAAAQAAAAMc2NhbGVkSW5wdXQyAwkAAAAAAAACBQAAAAZpbnB1dDIAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAABmlucHV0cwkABEwAAAACBQAAAAxzY2FsZWRJbnB1dDEJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQyBQAAAANuaWwEAAAADGxheWVyMU91dHB1dAkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAAAwUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMEAAAABm91dHB1dAkBAAAAEWZvcndhcmRQYXNzTGF5ZXIyAAAAAwUAAAAMbGF5ZXIxT3V0cHV0CQAETAAAAAIAAAAAAAB/GbgJAARMAAAAAgD//////3cbZgUAAAADbmlsAP//////xdY0CQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAAAZyZXN1bHQFAAAABm91dHB1dAUAAAADbmlsAAAAADZcM7U=", "height": 3030701, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: DqCRv1ayUUrGkR5twZhPSWEwMYD341o2LVcrJTNzDuet Next: GxmRfDVFhMDR3JZvnv8chGtkg2gj3wqrqdAiVh1pAjRP Diff:
Full diff:

  {-# STDLIB_VERSION 5 #-}
  {-# SCRIPT_TYPE ACCOUNT #-}
  {-# CONTENT_TYPE DAPP #-}
- let layer1Weights = [[-9275240, 6222139], [-9201827, -6516189], [-1528731, 11450396], [-7524843, -6044814]]
+ let layer1Weights = [[6004965, 6007324], [4141966, 4142525]]

- let layer1Biases = [-2569627, 2312524, -4752973, 1895166]
+ let layer1Biases = [-2590503, -6356371]

- let layer2Weights = [[-7575203, 5523326, 6581110, 3773202], [6861028, -5706216, -6035509, -3323542]]
+ let layer2Weights = [[8329656, -8971418]]

- let layer2Biases = [-3161622, 2945010]
-
- let layer3Weights = [[-8939640, 9517362]]
-
- let layer3Biases = [-192349]
+ let layer2Biases = [-3811788]

  func sigmoid (z) = {
      let e = 2718281
      let base = 1000000
      let positiveZ = if ((0 > z))
          then -(z)
          else z
      let expPart = fraction(e, base, positiveZ)
      fraction(base, base, (base + expPart))
      }


  func forwardPassLayer1 (input,weights,biases) = {
-     let sum0 = ((((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + fraction(input[2], weights[0][2], 1000000)) + fraction(input[3], weights[0][3], 1000000)) + biases[0])
-     let sum1 = ((((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + fraction(input[2], weights[1][2], 1000000)) + fraction(input[3], weights[1][3], 1000000)) + biases[1])
-     let sum2 = ((((fraction(input[0], weights[2][0], 1000000) + fraction(input[1], weights[2][1], 1000000)) + fraction(input[2], weights[2][2], 1000000)) + fraction(input[3], weights[2][3], 1000000)) + biases[2])
-     let sum3 = ((((fraction(input[0], weights[3][0], 1000000) + fraction(input[1], weights[3][1], 1000000)) + fraction(input[2], weights[3][2], 1000000)) + fraction(input[3], weights[3][3], 1000000)) + biases[3])
-     let sig0 = sigmoid(sum0)
-     let sig1 = sigmoid(sum1)
-     let sig2 = sigmoid(sum2)
-     let sig3 = sigmoid(sum3)
-     [sig0, sig1, sig2, sig3]
-     }
-
-
- func forwardPassLayer2 (input,weights,biases) = {
      let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
      let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + biases[1])
      let sig0 = sigmoid(sum0)
      let sig1 = sigmoid(sum1)
      [sig0, sig1]
      }


- func forwardPassLayer3 (input,weights,bias) = {
+ func forwardPassLayer2 (input,weights,bias) = {
      let dotProduct = (fraction(input[0], weights[0], 1000000) + fraction(input[1], weights[0], 1000000))
      let sum = (dotProduct + bias)
      sigmoid(sum)
      }


  @Callable(i)
- func predict_three_xor (input1,input2) = {
+ func predict (input1,input2) = {
      let scaledInput1 = if ((input1 == 1))
          then 1000000
          else 0
      let scaledInput2 = if ((input2 == 1))
          then 1000000
          else 0
      let inputs = [scaledInput1, scaledInput2]
      let layer1Output = forwardPassLayer1(inputs, layer1Weights, layer1Biases)
-     let layer2Output = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases)
-     let output = forwardPassLayer3(layer2Output, [-8939640, 9517362], -192349)
+     let output = forwardPassLayer2(layer1Output, [8329656, -8971418], -3811788)
      [IntegerEntry("result", output)]
      }
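
For clarity, the following is a minimal off-chain sketch (Python, not part of the transaction) that mirrors the fixed-point arithmetic of the new script. It assumes Ride's fraction(a, b, c) can be modelled as integer a * b / c; the exact on-chain rounding may differ, so the printed values are indicative rather than guaranteed to match the stored "result" entry exactly.

BASE = 1000000   # fixed-point scale used by the script (6 decimal places)
E = 2718281      # Euler's number scaled by BASE, as in sigmoid()

LAYER1_WEIGHTS = [[6004965, 6007324], [4141966, 4142525]]
LAYER1_BIASES = [-2590503, -6356371]
LAYER2_WEIGHTS = [8329656, -8971418]
LAYER2_BIAS = -3811788

def fraction(a, b, c):
    # Assumed model of Ride's fraction(a, b, c): integer a * b / c
    return a * b // c

def sigmoid(z):
    positive_z = -z if 0 > z else z
    exp_part = fraction(E, BASE, positive_z)
    return fraction(BASE, BASE, BASE + exp_part)

def forward_pass_layer1(inp):
    sum0 = fraction(inp[0], LAYER1_WEIGHTS[0][0], BASE) + fraction(inp[1], LAYER1_WEIGHTS[0][1], BASE) + LAYER1_BIASES[0]
    sum1 = fraction(inp[0], LAYER1_WEIGHTS[1][0], BASE) + fraction(inp[1], LAYER1_WEIGHTS[1][1], BASE) + LAYER1_BIASES[1]
    return [sigmoid(sum0), sigmoid(sum1)]

def forward_pass_layer2(inp):
    # The decompiled forwardPassLayer2 multiplies both inputs by weights[0]; reproduced literally here
    dot = fraction(inp[0], LAYER2_WEIGHTS[0], BASE) + fraction(inp[1], LAYER2_WEIGHTS[0], BASE)
    return sigmoid(dot + LAYER2_BIAS)

def predict(input1, input2):
    # Mirrors the @Callable predict(): an input of 1 is scaled to BASE, anything else to 0
    scaled = [BASE if input1 == 1 else 0, BASE if input2 == 1 else 0]
    return forward_pass_layer2(forward_pass_layer1(scaled))

for a in (0, 1):
    for b in (0, 1):
        print(a, b, predict(a, b))   # fixed-point output comparable to the on-chain "result" entry

Note that the second layer of the new script reads weights[0] for both terms of its dot product; the sketch keeps that behaviour instead of "correcting" it.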
