tx · 2EAsscTWTcXLbZe4WsBMSSeTj11Qf1Abr2c6XurS2kqL

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.27 17:15 [3081359] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "2EAsscTWTcXLbZe4WsBMSSeTj11Qf1Abr2c6XurS2kqL", "fee": 1000000, "feeAssetId": null, "timestamp": 1714227234026, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "2q1e3UguMMubQmyAwRhfjfj7Qzrb1gJ7QxKMUsqSyqeJWUUo49ThpfxB3qnt1L6gqQktgt5mEX4dTMkTAAiunpho" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAcAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwWCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcUJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdzY2FsZWRaCQAAaQAAAAIFAAAACXBvc2l0aXZlWgAAAAAAAAAnEAQAAAAHZXhwUGFydAkAAGsAAAADBQAAAAFlBQAAAARiYXNlBQAAAAdzY2FsZWRaBAAAAAhzaWdWYWx1ZQkAAGsAAAADBQAAAARiYXNlCQAAZAAAAAIFAAAABGJhc2UFAAAAB2V4cFBhcnQFAAAABGJhc2UJAAUUAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAJcG9zaXRpdmVaBQAAAAlwb3NpdGl2ZVoJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAAB2V4cFBhcnQFAAAAB2V4cFBhcnQJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACHNpZ1ZhbHVlBQAAAAhzaWdWYWx1ZQUAAAADbmlsBQAAAAhzaWdWYWx1ZQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA9CQAkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAA9CQAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAQQAAAALJHQwMTE0NTExOTEJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIxTjAEAAAABmRlYnVnMAgFAAAACyR0MDExNDUxMTkxAAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTE0NTExOTEAAAACXzIEAAAACyR0MDExOTYxMjQyCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTECAAAACExheWVyMU4xBAAAAAZkZWJ1ZzEIBQAAAAskdDAxMTk2MTI0MgAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDExOTYxMjQyAAAAAl8yCQAFFAAAAAIJAARMAAAAAgUAAAAEc2lnMAkABEwAAAACBQAAAARzaWcxBQAAAANuaWwJAAROAAAAAgUAAAAGZGVidWcwBQAAAAZkZWJ1ZzEBAAAAEWZvcndhcmRQYXNzTGF5ZXIyAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPQkAJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAPQkAJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAABAAAAAskdDAxNTExMTU1NwkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjJOMAQAAAAGZGVidWcwCAUAAAALJHQwMTUxMTE1NTcAAAACXzEEAAAABHNpZzAIBQAAAAskdDAxNTExMTU1NwAAAAJfMgkABRQAAAACBQAAAARzaWcwBQAAAAZkZWJ1ZzAAAAABAAAAAWkBAAAAB3ByZWRpY3QAAAACAAAABmlucHV0MQAAAAZpbnB1dDIEAAAADHNjYWxlZElucHV0MQMJAAAAAAAAAgUAAAAGaW5wdXQxAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAxzY2FsZWRJbnB1dDIDCQAAAAAAAAIFAAAABmlucHV0MgAAAAAAAAAAAQAAAAAAAA9CQAAAAAAAAA
AAAAQAAAAGaW5wdXRzCQAETAAAAAIFAAAADHNjYWxlZElucHV0MQkABEwAAAACBQAAAAxzY2FsZWRJbnB1dDIFAAAAA25pbAQAAAALJHQwMTgwODE5MDYJAQAAABFmb3J3YXJkUGFzc0xheWVyMQAAAAQFAAAABmlucHV0cwUAAAANbGF5ZXIxV2VpZ2h0cwUAAAAMbGF5ZXIxQmlhc2VzAgAAAAZMYXllcjEEAAAADGxheWVyMU91dHB1dAgFAAAACyR0MDE4MDgxOTA2AAAAAl8xBAAAAAtkZWJ1Z0xheWVyMQgFAAAACyR0MDE4MDgxOTA2AAAAAl8yBAAAAAskdDAxOTExMjAxNQkBAAAAEWZvcndhcmRQYXNzTGF5ZXIyAAAABAUAAAAMbGF5ZXIxT3V0cHV0BQAAAA1sYXllcjJXZWlnaHRzBQAAAAxsYXllcjJCaWFzZXMCAAAABkxheWVyMgQAAAAMbGF5ZXIyT3V0cHV0CAUAAAALJHQwMTkxMTIwMTUAAAACXzEEAAAAC2RlYnVnTGF5ZXIyCAUAAAALJHQwMTkxMTIwMTUAAAACXzIJAAROAAAAAgkABE4AAAACCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACAgAAAAZyZXN1bHQFAAAADGxheWVyMk91dHB1dAUAAAADbmlsBQAAAAtkZWJ1Z0xheWVyMQUAAAALZGVidWdMYXllcjIAAAAA+5sCSw==", "height": 3081359, "applicationStatus": "succeeded", "spentComplexity": 0 }
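
The same transaction JSON can be refetched from a node: chainId 84 marks this as a Waves testnet ('T') transaction, and a node's REST API exposes it under /transactions/info/{id}. A minimal sketch in Python; the node URL is an assumption of this sketch, not taken from the page:

import requests

# Assumption: any reachable Waves testnet REST node works here.
NODE = "https://nodes-testnet.wavesnodes.com"
TX_ID = "2EAsscTWTcXLbZe4WsBMSSeTj11Qf1Abr2c6XurS2kqL"

# /transactions/info/{id} returns the transaction JSON shown above,
# including the base64 "script" field of this set-script transaction.
tx = requests.get(f"{NODE}/transactions/info/{TX_ID}", timeout=10).json()
print(tx["type"], tx["height"], tx["applicationStatus"])
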
Full decompiled script, old → new (lines removed by this transaction are marked with -, added lines with +):

{-# STDLIB_VERSION 5 #-}
{-# SCRIPT_TYPE ACCOUNT #-}
{-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[-927524, 622214], [-920182, -651619], [-152874, 1145040], [-752484, -604481]]
+let layer1Weights = [[600497, 600733], [414197, 414253]]

-let layer1Biases = [-256962, 231253, -475298, 189517]
+let layer1Biases = [-259050, -635637]

-let layer2Weights = [[-757521, 552333, 658111, 377320], [686102, -570621, -603550, -332354]]
+let layer2Weights = [[832965, -897142]]

-let layer2Biases = [-316162, 294501]
-
-let layer3Weights = [[-893964, 951736]]
-
-let layer3Biases = [-19235]
+let layer2Biases = [-381179]

func sigmoid (z,debugPrefix) = {
    let e = 2718281
    let base = 1000000
    let positiveZ = if ((0 > z))
        then -(z)
        else z
-    let expPart = fraction(e, base, positiveZ)
+    let scaledZ = (positiveZ / 10000)
+    let expPart = fraction(e, base, scaledZ)
    let sigValue = fraction(base, (base + expPart), base)
    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
    }


func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
    let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
    let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + biases[1])
-    let sum2 = ((fraction(input[0], weights[2][0], 1000000) + fraction(input[1], weights[2][1], 1000000)) + biases[2])
-    let sum3 = ((fraction(input[0], weights[3][0], 1000000) + fraction(input[1], weights[3][1], 1000000)) + biases[3])
-    let $t014931539 = sigmoid(sum0, "Layer1N0")
-    let debug0 = $t014931539._1
-    let sig0 = $t014931539._2
-    let $t015441590 = sigmoid(sum1, "Layer1N1")
-    let debug1 = $t015441590._1
-    let sig1 = $t015441590._2
-    let $t015951641 = sigmoid(sum2, "Layer1N2")
-    let debug2 = $t015951641._1
-    let sig2 = $t015951641._2
-    let $t016461692 = sigmoid(sum3, "Layer1N3")
-    let debug3 = $t016461692._1
-    let sig3 = $t016461692._2
-    $Tuple2([sig0, sig1, sig2, sig3], (((debug0 ++ debug1) ++ debug2) ++ debug3))
+    let $t011451191 = sigmoid(sum0, "Layer1N0")
+    let debug0 = $t011451191._1
+    let sig0 = $t011451191._2
+    let $t011961242 = sigmoid(sum1, "Layer1N1")
+    let debug1 = $t011961242._1
+    let sig1 = $t011961242._2
+    $Tuple2([sig0, sig1], (debug0 ++ debug1))
    }


func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
-    let sum0 = ((((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + fraction(input[2], weights[0][2], 1000000)) + fraction(input[3], weights[0][3], 1000000)) + biases[0])
-    let sum1 = ((((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + fraction(input[2], weights[1][2], 1000000)) + fraction(input[3], weights[1][3], 1000000)) + biases[1])
-    let $t022882334 = sigmoid(sum0, "Layer2N0")
-    let debug0 = $t022882334._1
-    let sig0 = $t022882334._2
-    let $t023392385 = sigmoid(sum1, "Layer2N1")
-    let debug1 = $t023392385._1
-    let sig1 = $t023392385._2
-    $Tuple2([sig0, sig1], (debug0 ++ debug1))
-    }
-
-
-func forwardPassLayer3 (input,weights,biases,debugPrefix) = {
    let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
-    let $t026542700 = sigmoid(sum0, "Layer3N0")
-    let debug0 = $t026542700._1
-    let sig0 = $t026542700._2
+    let $t015111557 = sigmoid(sum0, "Layer2N0")
+    let debug0 = $t015111557._1
+    let sig0 = $t015111557._2
    $Tuple2(sig0, debug0)
    }


@Callable(i)
func predict (input1,input2) = {
    let scaledInput1 = if ((input1 == 1))
        then 1000000
        else 0
    let scaledInput2 = if ((input2 == 1))
        then 1000000
        else 0
    let inputs = [scaledInput1, scaledInput2]
-    let $t029513049 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let layer1Output = $t029513049._1
-    let debugLayer1 = $t029513049._2
-    let $t030543158 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let layer2Output = $t030543158._1
-    let debugLayer2 = $t030543158._2
-    let $t031633267 = forwardPassLayer3(layer2Output, layer3Weights, layer3Biases, "Layer3")
-    let layer3Output = $t031633267._1
-    let debugLayer3 = $t031633267._2
-    ((([IntegerEntry("result", layer3Output)] ++ debugLayer1) ++ debugLayer2) ++ debugLayer3)
+    let $t018081906 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let layer1Output = $t018081906._1
+    let debugLayer1 = $t018081906._2
+    let $t019112015 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let layer2Output = $t019112015._1
+    let debugLayer2 = $t019112015._2
+    (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
    }
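
For reference, the fixed-point arithmetic of the new script can be replayed off-chain. The Python sketch below is not part of the deployed code: it transcribes sigmoid, forwardPassLayer1, forwardPassLayer2 and the predict callable using the weights and biases from the decompiled script, assumes floor division for Ride's fraction (the on-chain rounding may differ in the last digit), and drops the debug entries.

SCALE = 1_000_000          # 6-decimal fixed point, as in the script
E_FIXED = 2_718_281        # the script's constant for e

LAYER1_WEIGHTS = [[600497, 600733], [414197, 414253]]
LAYER1_BIASES = [-259050, -635637]
LAYER2_WEIGHTS = [[832965, -897142]]
LAYER2_BIASES = [-381179]

def fraction(a, b, c):
    # Stand-in for Ride's fraction(a, b, c) = a * b / c on wide integers;
    # floor division is an assumption of this sketch.
    return (a * b) // c

def sigmoid(z):
    # Mirrors the script's approximation, including its quirks:
    # |z| is divided by 10000, and e*base/scaledZ replaces the exponential term.
    positive_z = -z if z < 0 else z
    scaled_z = positive_z // 10000            # division by zero when |z| < 10000, as on-chain
    exp_part = fraction(E_FIXED, SCALE, scaled_z)
    return fraction(SCALE, SCALE + exp_part, SCALE)

def predict(input1, input2):
    # Inputs are mapped to 0 or 1000000, exactly as in the callable.
    inputs = [SCALE if input1 == 1 else 0, SCALE if input2 == 1 else 0]
    layer1 = [
        sigmoid(fraction(inputs[0], w[0], SCALE) + fraction(inputs[1], w[1], SCALE) + b)
        for w, b in zip(LAYER1_WEIGHTS, LAYER1_BIASES)
    ]
    sum0 = (fraction(layer1[0], LAYER2_WEIGHTS[0][0], SCALE)
            + fraction(layer1[1], LAYER2_WEIGHTS[0][1], SCALE)
            + LAYER2_BIASES[0])
    return sigmoid(sum0)                      # value written under the "result" key

if __name__ == "__main__":
    for a in (0, 1):
        for b in (0, 1):
            print(f"predict({a}, {b}) -> result = {predict(a, b)}")

Two properties of the script carry over unchanged into the sketch: sigValue = fraction(base, (base + expPart), base) reduces to base + expPart, so the stored values are not confined to the 0..1000000 range of a true sigmoid, and sigmoid divides by scaledZ, so any |z| below 10000 would make both the sketch and the on-chain call fail with a division by zero.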
