tx · BM7ehgNJjRxHo7mhrVujFVsbX5FhFkTyDeqeCDjJhPbY

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 11:47 [3082483] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "BM7ehgNJjRxHo7mhrVujFVsbX5FhFkTyDeqeCDjJhPbY", "fee": 1000000, "feeAssetId": null, "timestamp": 1714294078020, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "2ARRCU9mKCd11NJEymBzPrWjEKVj2oQWL82sr2zn7sbAbNtA83QJpMJuL68g56bJxkvY3giHsRUhiutcQ1vfMpSa" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAgAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKpwFAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwVCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcUJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAAEWV4cF9hcHByb3hpbWF0aW9uAAAAAQAAAAF4BAAAAAVzY2FsZQAAAAAAAAGGoAQAAAAIc2NhbGVkX3gJAABpAAAAAgUAAAABeAUAAAAFc2NhbGUEAAAAAWUAAAAAAAApekkEAAAAB2ZhY3RvcjEFAAAACHNjYWxlZF94BAAAAAdmYWN0b3IyCQAAaQAAAAIJAABoAAAAAgUAAAAIc2NhbGVkX3gFAAAACHNjYWxlZF94CQAAaAAAAAIAAAAAAAAAAAIFAAAABXNjYWxlBAAAAAdmYWN0b3IzCQAAaQAAAAIJAABoAAAAAgkAAGgAAAACBQAAAAhzY2FsZWRfeAUAAAAIc2NhbGVkX3gFAAAACHNjYWxlZF94CQAAaAAAAAIJAABoAAAAAgAAAAAAAAAABgUAAAAFc2NhbGUFAAAABXNjYWxlCQAAZAAAAAIJAABkAAAAAgkAAGQAAAACAAAAAAAAAYagBQAAAAdmYWN0b3IxBQAAAAdmYWN0b3IyBQAAAAdmYWN0b3IzAQAAAAdzaWdtb2lkAAAAAgAAAAF6AAAAC2RlYnVnUHJlZml4BAAAAARiYXNlAAAAAAAAAYagBAAAAAlwb3NpdGl2ZVoDCQAAZgAAAAIAAAAAAAAAAAAFAAAAAXoJAQAAAAEtAAAAAQUAAAABegUAAAABegQAAAAIZXhwVmFsdWUJAQAAABFleHBfYXBwcm94aW1hdGlvbgAAAAEFAAAACXBvc2l0aXZlWgQAAAAIc2lnVmFsdWUJAABpAAAAAgkAAGgAAAACBQAAAARiYXNlAAAAAAAAAYagCQAAZAAAAAIFAAAABGJhc2UFAAAACGV4cFZhbHVlCQAFFAAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACXBvc2l0aXZlWgUAAAAJcG9zaXRpdmVaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhleHBWYWx1ZQUAAAAIZXhwVmFsdWUJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACHNpZ1ZhbHVlBQAAAAhzaWdWYWx1ZQUAAAADbmlsBQAAAAhzaWdWYWx1ZQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA9CQAkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAA9CQAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAQQAAAALJHQwMTY4OTE3MzUJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIxTjAEAAAABmRlYnVnMAgFAAAACyR0MDE2ODkxNzM1AAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTY4OTE3MzUAAAACXzIEAAAACyR0MDE3NDAxNzg2CQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTECAAAACExheWVyMU4xBAAAAAZkZWJ1ZzEIBQAAAAskdDAxNzQwMTc4NgAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDE3NDAxNzg2AAAAAl8yCQAFFAAAAAIJAARMAAAAAgUAAAAEc2lnMAkABEwAAAACBQAAAARzaWcxBQAAAANuaWwJAAROAAAAAgUAAAAGZGVidWcwBQAAAAZkZWJ1ZzEBAAAAEWZvcndhcmRQYXNzTGF5ZXIyAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPQkAJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAA
EAAAAAAAAPQkAJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAABAAAAAskdDAyMDU1MjEwMQkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjJOMAQAAAAGZGVidWcwCAUAAAALJHQwMjA1NTIxMDEAAAACXzEEAAAABHNpZzAIBQAAAAskdDAyMDU1MjEwMQAAAAJfMgkABRQAAAACBQAAAARzaWcwBQAAAAZkZWJ1ZzAAAAABAAAAAWkBAAAAB3ByZWRpY3QAAAACAAAABmlucHV0MQAAAAZpbnB1dDIEAAAADHNjYWxlZElucHV0MQMJAAAAAAAAAgUAAAAGaW5wdXQxAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAxzY2FsZWRJbnB1dDIDCQAAAAAAAAIFAAAABmlucHV0MgAAAAAAAAAAAQAAAAAAAA9CQAAAAAAAAAAAAAQAAAAGaW5wdXRzCQAETAAAAAIFAAAADHNjYWxlZElucHV0MQkABEwAAAACBQAAAAxzY2FsZWRJbnB1dDIFAAAAA25pbAQAAAALJHQwMjM1MjI0NTAJAQAAABFmb3J3YXJkUGFzc0xheWVyMQAAAAQFAAAABmlucHV0cwUAAAANbGF5ZXIxV2VpZ2h0cwUAAAAMbGF5ZXIxQmlhc2VzAgAAAAZMYXllcjEEAAAADGxheWVyMU91dHB1dAgFAAAACyR0MDIzNTIyNDUwAAAAAl8xBAAAAAtkZWJ1Z0xheWVyMQgFAAAACyR0MDIzNTIyNDUwAAAAAl8yBAAAAAskdDAyNDU1MjU1OQkBAAAAEWZvcndhcmRQYXNzTGF5ZXIyAAAABAUAAAAMbGF5ZXIxT3V0cHV0BQAAAA1sYXllcjJXZWlnaHRzBQAAAAxsYXllcjJCaWFzZXMCAAAABkxheWVyMgQAAAAMbGF5ZXIyT3V0cHV0CAUAAAALJHQwMjQ1NTI1NTkAAAACXzEEAAAAC2RlYnVnTGF5ZXIyCAUAAAALJHQwMjQ1NTI1NTkAAAACXzIJAAROAAAAAgkABE4AAAACCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAAAZyZXN1bHQFAAAADGxheWVyMk91dHB1dAUAAAADbmlsBQAAAAtkZWJ1Z0xheWVyMQUAAAALZGVidWdMYXllcjIAAAAAHK+HBw==", "height": 3082483, "applicationStatus": "succeeded", "spentComplexity": 0 }

Prev: GY2gPoqivX3eeH3zPPj8SdvLMnbisXfuy2r24TQoJGgR
Next: BgXQzeibJ6sxd4Syjnb3oFzkazJ4kF1yu3ctYeGvh9Tu
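The script field above is the compiled contract in base64. To cross-check it against the decompiled listing below, the transaction can be fetched from a Testnet node (chainId 84 is the Testnet byte 'T') and handed to the node's decompile utility. A minimal sketch, assuming the public node at nodes-testnet.wavesnodes.com and the standard REST routes /transactions/info/{id} and /utils/script/decompile; exact request and response formats may vary by node version:

import requests

NODE = "https://nodes-testnet.wavesnodes.com"   # assumed public Testnet node
TX_ID = "BM7ehgNJjRxHo7mhrVujFVsbX5FhFkTyDeqeCDjJhPbY"

# Fetch the set-script transaction (type 13) by id.
tx = requests.get(f"{NODE}/transactions/info/{TX_ID}", timeout=10).json()
print(tx["type"], tx["height"], tx.get("applicationStatus"))

# Ask the node to turn the base64 script back into Ride source.
# The request body is the script string exactly as stored in the transaction.
decompiled = requests.post(f"{NODE}/utils/script/decompile", data=tx["script"], timeout=10)
print(decompiled.text)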
Diff vs. the previous script version: two layer-1 weights are bumped by one unit (600496 → 600497 and 414196 → 414197), and the fixed-point constants in exp_approximation and sigmoid move from a 10000 scale to a 100000 scale (scale, the constant e, the leading series term, and the sigmoid numerator); the compiler-generated $t… tuple names shift because the source offsets changed. The full listing with the changes marked follows.
Full (decompiled script; lines marked "-" were removed and "+" added relative to the previous version):

  {-# STDLIB_VERSION 5 #-}
  {-# SCRIPT_TYPE ACCOUNT #-}
  {-# CONTENT_TYPE DAPP #-}
- let layer1Weights = [[600496, 600732], [414196, 414253]]
+ let layer1Weights = [[600497, 600732], [414197, 414253]]

  let layer1Biases = [-259051, -635637]

  let layer2Weights = [[832965, -897142]]

  let layer2Biases = [-381179]

  func exp_approximation (x) = {
-     let scale = 10000
+     let scale = 100000
      let scaled_x = (x / scale)
-     let e = 27183
+     let e = 2718281
      let factor1 = scaled_x
      let factor2 = ((scaled_x * scaled_x) / (2 * scale))
      let factor3 = (((scaled_x * scaled_x) * scaled_x) / ((6 * scale) * scale))
-     (((10000 + factor1) + factor2) + factor3)
+     (((100000 + factor1) + factor2) + factor3)
      }
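For reference, the updated exp_approximation can be replayed off-chain. It is a truncated-series exponential (1 + x + x^2/2! + x^3/3! in shape) evaluated on integers with scale = 100000. A minimal Python mirror, assuming a non-negative argument (sigmoid below only ever passes positiveZ), so floor division matches Ride's integer division:

def exp_approximation(x: int) -> int:
    # Line-by-line mirror of the on-chain function.
    scale = 100000
    scaled_x = x // scale
    e = 2718281          # declared on-chain as well, but not used below
    factor1 = scaled_x
    factor2 = (scaled_x * scaled_x) // (2 * scale)
    factor3 = (scaled_x * scaled_x * scaled_x) // (6 * scale * scale)
    return 100000 + factor1 + factor2 + factor3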
  func sigmoid (z,debugPrefix) = {
-     let base = 10000
+     let base = 100000
      let positiveZ = if ((0 > z))
          then -(z)
          else z
      let expValue = exp_approximation(positiveZ)
-     let sigValue = ((base * 10000) / (base + expValue))
+     let sigValue = ((base * 100000) / (base + expValue))
      $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
      }
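sigmoid uses the same base = 100000 convention: it feeds the absolute value of z into exp_approximation and returns (base * 100000) / (base + expValue) alongside three debug entries. Continuing the mirror above (debug entries become a plain dict rather than IntegerEntry writes):

def sigmoid(z: int, debug_prefix: str):
    # Mirror of the on-chain sigmoid; reuses exp_approximation() from the sketch above.
    base = 100000
    positive_z = -z if 0 > z else z
    exp_value = exp_approximation(positive_z)
    sig_value = (base * 100000) // (base + exp_value)
    debug = {
        debug_prefix + "positiveZ": positive_z,
        debug_prefix + "expValue": exp_value,
        debug_prefix + "sigValue": sig_value,
    }
    return debug, sig_value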
  func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
      let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
      let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + biases[1])
-     let $t017111757 = sigmoid(sum0, "Layer1N0")
-     let debug0 = $t017111757._1
-     let sig0 = $t017111757._2
-     let $t017621808 = sigmoid(sum1, "Layer1N1")
-     let debug1 = $t017621808._1
-     let sig1 = $t017621808._2
+     let $t016891735 = sigmoid(sum0, "Layer1N0")
+     let debug0 = $t016891735._1
+     let sig0 = $t016891735._2
+     let $t017401786 = sigmoid(sum1, "Layer1N1")
+     let debug1 = $t017401786._1
+     let sig1 = $t017401786._2
      $Tuple2([sig0, sig1], (debug0 ++ debug1))
      }


  func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
      let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
-     let $t020772123 = sigmoid(sum0, "Layer2N0")
-     let debug0 = $t020772123._1
-     let sig0 = $t020772123._2
+     let $t020552101 = sigmoid(sum0, "Layer2N0")
+     let debug0 = $t020552101._1
+     let sig0 = $t020552101._2
      $Tuple2(sig0, debug0)
      }
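Both forward-pass helpers compute a weighted sum with fraction(a, b, 1000000), i.e. a * b / 1000000 in integer arithmetic, add the bias, and pass the sum through sigmoid. Continuing the sketch; the fraction stand-in below assumes floor-style rounding, which may differ by one unit from the node's result for negative products (layer 2 has a negative weight):

def fraction(a: int, b: int, c: int) -> int:
    # Approximation of Ride's fraction(a, b, c) = a * b / c.
    return (a * b) // c


def forward_pass_layer1(inputs, weights, biases, debug_prefix):
    # debug_prefix is accepted but unused, exactly as in the contract:
    # the debug keys are hard-coded "Layer1N0" / "Layer1N1".
    sum0 = fraction(inputs[0], weights[0][0], 1000000) + fraction(inputs[1], weights[0][1], 1000000) + biases[0]
    sum1 = fraction(inputs[0], weights[1][0], 1000000) + fraction(inputs[1], weights[1][1], 1000000) + biases[1]
    debug0, sig0 = sigmoid(sum0, "Layer1N0")
    debug1, sig1 = sigmoid(sum1, "Layer1N1")
    return [sig0, sig1], {**debug0, **debug1}


def forward_pass_layer2(inputs, weights, biases, debug_prefix):
    sum0 = fraction(inputs[0], weights[0][0], 1000000) + fraction(inputs[1], weights[0][1], 1000000) + biases[0]
    debug0, sig0 = sigmoid(sum0, "Layer2N0")
    return sig0, debug0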
  @Callable(i)
  func predict (input1,input2) = {
      let scaledInput1 = if ((input1 == 1))
          then 1000000
          else 0
      let scaledInput2 = if ((input2 == 1))
          then 1000000
          else 0
      let inputs = [scaledInput1, scaledInput2]
-     let $t023742472 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-     let layer1Output = $t023742472._1
-     let debugLayer1 = $t023742472._2
-     let $t024772581 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-     let layer2Output = $t024772581._1
-     let debugLayer2 = $t024772581._2
+     let $t023522450 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+     let layer1Output = $t023522450._1
+     let debugLayer1 = $t023522450._2
+     let $t024552559 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+     let layer2Output = $t024552559._1
+     let debugLayer2 = $t024552559._2
      (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
      }
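Finally, predict maps its two integer arguments to 0 or 1000000, runs the two layers, and writes the layer-2 output to the "result" entry together with every debug entry. The snippet below completes the off-chain mirror and replays the callable for all four binary input pairs; subject to the rounding caveat above, the printed integers should track what an invocation would store in state:

# Constants from the updated script above.
layer1_weights = [[600497, 600732], [414197, 414253]]
layer1_biases = [-259051, -635637]
layer2_weights = [[832965, -897142]]
layer2_biases = [-381179]


def predict(input1: int, input2: int) -> int:
    # Mirror of the @Callable predict: returns the integer written to "result".
    scaled_input1 = 1000000 if input1 == 1 else 0
    scaled_input2 = 1000000 if input2 == 1 else 0
    inputs = [scaled_input1, scaled_input2]
    layer1_output, _debug1 = forward_pass_layer1(inputs, layer1_weights, layer1_biases, "Layer1")
    layer2_output, _debug2 = forward_pass_layer2(layer1_output, layer2_weights, layer2_biases, "Layer2")
    return layer2_output


for a in (0, 1):
    for b in (0, 1):
        print(a, b, predict(a, b))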
