tx · 2pTSvMCyo3YZTF9HXrKDsZxXPjrtSvBivLFN1ejfzWek

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.03.23 15:56 [3030725] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "2pTSvMCyo3YZTF9HXrKDsZxXPjrtSvBivLFN1ejfzWek", "fee": 1000000, "feeAssetId": null, "timestamp": 1711198668694, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "2mjXjGB4wmwwYk1PRdDLhzWfZSVe2rqnzSBdmaxnRFPNyZGvASFgqkEM7Hap7kd2VESs1bN66PzzStCY3FHaiu2D" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAoAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAP//////cniYCQAETAAAAAIAAAAAAABe8TsFAAAAA25pbAkABEwAAAACCQAETAAAAAIA//////9zl10JAARMAAAAAgD//////5ySIwUAAAADbmlsCQAETAAAAAIJAARMAAAAAgD//////+isZQkABEwAAAACAAAAAAAArrgcBQAAAANuaWwJAARMAAAAAgkABEwAAAACAP//////jS4VCQAETAAAAAIA//////+jw3IFAAAAA25pbAUAAAADbmlsAAAAAAxsYXllcjFCaWFzZXMJAARMAAAAAgD//////9jKZQkABEwAAAACAAAAAAAAI0lMCQAETAAAAAIA//////+3ebMJAARMAAAAAgAAAAAAABzq/gUAAAADbmlsAAAAAA1sYXllcjJXZWlnaHRzCQAETAAAAAIJAARMAAAAAgD//////4xpXQkABEwAAAACAAAAAAAAVEd+CQAETAAAAAIAAAAAAABka3YJAARMAAAAAgAAAAAAADmTEgUAAAADbmlsCQAETAAAAAIJAARMAAAAAgAAAAAAAGiw5AkABEwAAAACAP//////qO4YCQAETAAAAAIA//////+j58sJAARMAAAAAgD//////81JagUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////z8HqCQAETAAAAAIAAAAAAAAs7/IFAAAAA25pbAAAAAANbGF5ZXIzV2VpZ2h0cwkABEwAAAACCQAETAAAAAIA//////93l4gJAARMAAAAAgAAAAAAAJE5MgUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyM0JpYXNlcwkABEwAAAACAP///////RCjBQAAAANuaWwBAAAAB3NpZ21vaWQAAAABAAAAAXoEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdleHBQYXJ0CQAAawAAAAMFAAAAAWUFAAAABGJhc2UFAAAACXBvc2l0aXZlWgkAAGsAAAADBQAAAARiYXNlBQAAAARiYXNlCQAAZAAAAAIFAAAABGJhc2UFAAAAB2V4cFBhcnQBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAAAwAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAIJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAMJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAADAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAQAAAAEc3VtMQkAAGQAAAACCQAAZAAAAAIJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAIJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAACAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAMJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAADAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAQQAAAAEc3VtMgkAAGQAAAACCQAAZAAAAAIJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAACAAAAAAAAAAABAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAIJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAACAAAAAAAAAAACAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAMJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAACAAAAAAAAAAADAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAgQAAAAEc3VtMwkAAGQAAAACCQAAZAAAAAIJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAA
ACBQAAAAd3ZWlnaHRzAAAAAAAAAAADAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAADAAAAAAAAAAABAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAIJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAADAAAAAAAAAAACAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAMJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAADAAAAAAAAAAADAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAwQAAAAEc2lnMAkBAAAAB3NpZ21vaWQAAAABBQAAAARzdW0wBAAAAARzaWcxCQEAAAAHc2lnbW9pZAAAAAEFAAAABHN1bTEEAAAABHNpZzIJAQAAAAdzaWdtb2lkAAAAAQUAAAAEc3VtMgQAAAAEc2lnMwkBAAAAB3NpZ21vaWQAAAABBQAAAARzdW0zCQAETAAAAAIFAAAABHNpZzAJAARMAAAAAgUAAAAEc2lnMQkABEwAAAACBQAAAARzaWcyCQAETAAAAAIFAAAABHNpZzMFAAAAA25pbAEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAADAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzBAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA9CQAkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAA9CQAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAQQAAAAEc2lnMAkBAAAAB3NpZ21vaWQAAAABBQAAAARzdW0wBAAAAARzaWcxCQEAAAAHc2lnbW9pZAAAAAEFAAAABHN1bTEJAARMAAAAAgUAAAAEc2lnMAkABEwAAAACBQAAAARzaWcxBQAAAANuaWwBAAAAEWZvcndhcmRQYXNzTGF5ZXIzAAAAAwAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABGJpYXMEAAAACmRvdFByb2R1Y3QJAABkAAAAAgkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAPQkAJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAD0JABAAAAANzdW0JAABkAAAAAgUAAAAKZG90UHJvZHVjdAUAAAAEYmlhcwkBAAAAB3NpZ21vaWQAAAABBQAAAANzdW0AAAABAAAAAWkBAAAADXByZWRpY3RfdGhyZWUAAAACAAAABmlucHV0MQAAAAZpbnB1dDIEAAAADHNjYWxlZElucHV0MQMJAAAAAAAAAgUAAAAGaW5wdXQxAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAxzY2FsZWRJbnB1dDIDCQAAAAAAAAIFAAAABmlucHV0MgAAAAAAAAAAAQAAAAAAAA9CQAAAAAAAAAAAAAQAAAAGaW5wdXRzCQAETAAAAAIFAAAADHNjYWxlZElucHV0MQkABEwAAAACBQAAAAxzY2FsZWRJbnB1dDIFAAAAA25pbAQAAAAMbGF5ZXIxT3V0cHV0CQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAADBQAAAAZpbnB1dHMFAAAADWxheWVyMVdlaWdodHMFAAAADGxheWVyMUJpYXNlcwQAAAAMbGF5ZXIyT3V0cHV0CQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAADBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwQAAAAGb3V0cHV0CQEAAAARZm9yd2FyZFBhc3NMYXllcjMAAAADBQAAAAxsYXllcjJPdXRwdXQJAARMAAAAAgD//////3eXiAkABEwAAAACAAAAAAAAkTkyBQAAAANuaWwA///////9EKMJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAUAAAAGb3V0cHV0BQAAAANuaWwAAAAAi25nkA==", "height": 3030725, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: GxmRfDVFhMDR3JZvnv8chGtkg2gj3wqrqdAiVh1pAjRP Next: AzkepTgdsr4fYGk5ZLwJnFd387drQyDzUAC8hQq6gM1g Full:
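
The JSON above is the raw SetScript transaction (type 13); chainId 84 ('T') and the 3N... addresses mark it as Waves testnet, and the "script" field carries the compiled dApp in base64. The same record can be re-fetched from the REST API of any testnet node; a minimal sketch in Python follows, assuming the public endpoint nodes-testnet.wavesnodes.com is reachable.

# Sketch: re-fetch this SetScript transaction from a Waves testnet node.
# The node URL is an assumption; any testnet node exposing the public
# REST API (GET /transactions/info/{id}) should return the same record.
import json
import urllib.request

TX_ID = "2pTSvMCyo3YZTF9HXrKDsZxXPjrtSvBivLFN1ejfzWek"
NODE = "https://nodes-testnet.wavesnodes.com"

with urllib.request.urlopen(f"{NODE}/transactions/info/{TX_ID}") as resp:
    tx = json.load(resp)

print(tx["type"], tx["height"], tx["applicationStatus"])  # expected: 13 3030725 succeeded
print(tx["script"][:40] + "...")  # the base64-encoded compiled script shown above
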
Differences (old script → new script):

 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[4721113, -5002107], [6226846, -6353789]]
+let layer1Weights = [[-9275240, 6222139], [-9201827, -6516189], [-1528731, 11450396], [-7524843, -6044814]]

-let layer1Biases = [-2521378, 3389498]
+let layer1Biases = [-2569627, 2312524, -4752973, 1895166]

-let layer2Weights = [[8109936, -7559760]]
+let layer2Weights = [[-7575203, 5523326, 6581110, 3773202], [6861028, -5706216, -6035509, -3323542]]

-let layer2Biases = [3490942]
+let layer2Biases = [-3161622, 2945010]

-func sigmoid (z,debugPrefix) = {
+let layer3Weights = [[-8939640, 9517362]]
+
+let layer3Biases = [-192349]
+
+func sigmoid (z) = {
     let e = 2718281
     let base = 1000000
     let positiveZ = if ((0 > z))
         then -(z)
         else z
     let expPart = fraction(e, base, positiveZ)
-    let sigValue = fraction(base, base, (base + expPart))
-    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
+    fraction(base, base, (base + expPart))
 }

-func dotProduct (a,b) = {
-    let product0 = fraction(a[0], b[0], 1000000)
-    let product1 = fraction(a[1], b[1], 1000000)
-    (product0 + product1)
+func forwardPassLayer1 (input,weights,biases) = {
+    let sum0 = ((((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + fraction(input[2], weights[0][2], 1000000)) + fraction(input[3], weights[0][3], 1000000)) + biases[0])
+    let sum1 = ((((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + fraction(input[2], weights[1][2], 1000000)) + fraction(input[3], weights[1][3], 1000000)) + biases[1])
+    let sum2 = ((((fraction(input[0], weights[2][0], 1000000) + fraction(input[1], weights[2][1], 1000000)) + fraction(input[2], weights[2][2], 1000000)) + fraction(input[3], weights[2][3], 1000000)) + biases[2])
+    let sum3 = ((((fraction(input[0], weights[3][0], 1000000) + fraction(input[1], weights[3][1], 1000000)) + fraction(input[2], weights[3][2], 1000000)) + fraction(input[3], weights[3][3], 1000000)) + biases[3])
+    let sig0 = sigmoid(sum0)
+    let sig1 = sigmoid(sum1)
+    let sig2 = sigmoid(sum2)
+    let sig3 = sigmoid(sum3)
+    [sig0, sig1, sig2, sig3]
 }

-func forwardPass (input,weights,biases,layer) = {
-    let sum0 = (dotProduct(input, weights[0]) + biases[0])
-    let sum1 = (dotProduct(input, weights[1]) + biases[1])
-    let $t013311388 = sigmoid(sum0, (layer + "L1N1"))
-    let sigmoidDebug0 = $t013311388._1
-    let sig0 = $t013311388._2
-    let $t013931450 = sigmoid(sum1, (layer + "L1N2"))
-    let sigmoidDebug1 = $t013931450._1
-    let sig1 = $t013931450._2
-    $Tuple2([sig0, sig1, sum0, sum1], (sigmoidDebug0 ++ sigmoidDebug1))
+func forwardPassLayer2 (input,weights,biases) = {
+    let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
+    let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + biases[1])
+    let sig0 = sigmoid(sum0)
+    let sig1 = sigmoid(sum1)
+    [sig0, sig1]
 }

-func xorNeuralNetwork (input1,input2) = {
-    let input = [input1, input2]
-    let $t016281720 = forwardPass(input, layer1Weights, layer1Biases, "HL")
-    let hiddenLayerOutput = $t016281720._1
-    let hiddenDebug = $t016281720._2
-    let $t017251860 = sigmoid((dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), "OL")
-    let outputDebug = $t017251860._1
-    let output = $t017251860._2
-    $Tuple2([output, (dotProduct([hiddenLayerOutput[0], hiddenLayerOutput[1]], layer2Weights[0]) + layer2Biases[0]), hiddenLayerOutput[2], hiddenLayerOutput[3]], (hiddenDebug ++ outputDebug))
+func forwardPassLayer3 (input,weights,bias) = {
+    let dotProduct = (fraction(input[0], weights[0], 1000000) + fraction(input[1], weights[0], 1000000))
+    let sum = (dotProduct + bias)
+    sigmoid(sum)
 }

 @Callable(i)
-func predict (input1,input2) = {
+func predict_three (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
-    let $t022452326 = xorNeuralNetwork(scaledInput1, scaledInput2)
-    let networkOutputs = $t022452326._1
-    let debugEntries = $t022452326._2
-    ([IntegerEntry("result", networkOutputs[0]), IntegerEntry("outputLayerSum", networkOutputs[1]), IntegerEntry("hiddenLayerOutput1Sum", networkOutputs[2]), IntegerEntry("hiddenLayerOutput2Sum", networkOutputs[3])] ++ debugEntries)
+    let inputs = [scaledInput1, scaledInput2]
+    let layer1Output = forwardPassLayer1(inputs, layer1Weights, layer1Biases)
+    let layer2Output = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases)
+    let output = forwardPassLayer3(layer2Output, [-8939640, 9517362], -192349)
+    [IntegerEntry("result", output)]
 }
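
The new script is a small feed-forward network evaluated in fixed-point arithmetic: every weight, bias and activation is an integer scaled by 1,000,000, fraction(x, w, 1000000) stands in for a scaled multiply, and sigmoid(z) is approximated as base*base / (base + e*base/|z|) with e = 2718281 (the approximation depends only on |z| and is undefined at z = 0). The sketch below reproduces that arithmetic off-chain in Python; SCALE and dense are illustrative names, and fraction() is assumed to truncate toward zero, which may not match the node's rounding exactly. It mirrors the arithmetic scheme rather than re-running predict_three verbatim, because forwardPassLayer1 as decompiled reads input[2], input[3] and weights[i][2], weights[i][3], while predict_three passes a two-element input and each layer1Weights row holds only two values.

# Illustrative off-chain sketch (not part of the transaction) of the
# contract's fixed-point arithmetic. SCALE and dense are made-up names;
# fraction() is assumed to truncate toward zero.

SCALE = 1_000_000      # all values are scaled by 1e6
E = 2_718_281          # e scaled by 1e6, as in sigmoid()

def fraction(a, b, c):
    # Ride-style fraction(a, b, c) ~ a * b / c on scaled integers.
    p = a * b
    q = abs(p) // abs(c)
    return q if (p >= 0) == (c > 0) else -q

def sigmoid(z):
    # Same approximation as the script: base*base / (base + e*base/|z|),
    # roughly 1 / (1 + e/|z|) in real units.
    positive_z = -z if z < 0 else z
    exp_part = fraction(E, SCALE, positive_z)
    return fraction(SCALE, SCALE, SCALE + exp_part)

def dense(inputs, weights, biases):
    # One fully connected layer followed by the sigmoid above, with weight
    # rows matching the input length (unlike forwardPassLayer1 as decompiled,
    # which always reads four input/weight entries).
    return [sigmoid(sum(fraction(x, w, SCALE) for x, w in zip(inputs, row)) + b)
            for row, b in zip(weights, biases)]

# First two columns of layer2Weights, which is all forwardPassLayer2 reads.
layer2_weights = [[-7575203, 5523326], [6861028, -5706216]]
layer2_biases = [-3161622, 2945010]

# Example: feed two mid-range activations (0.5, 0.5) through a layer-2-style pass.
hidden = dense([500_000, 500_000], layer2_weights, layer2_biases)
# Layer 3 as decompiled multiplies both inputs by weights[0]; weights[1] is unused.
out = sigmoid(fraction(hidden[0], -8939640, SCALE)
              + fraction(hidden[1], -8939640, SCALE) - 192349)
print(hidden, out)
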
