tx · EUk6SP8KWsjHF1LQH8TKBsex2FAUFa7uDWqcmJfKSMsf

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 13:45 [3082613] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "EUk6SP8KWsjHF1LQH8TKBsex2FAUFa7uDWqcmJfKSMsf", "fee": 1000000, "feeAssetId": null, "timestamp": 1714301098429, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "nnZjsWb3NihpW1xecXmukJkZZb4ro4TZSTLxZBvLouQ5ushJkLYLrmZ9KZiexSmqQYVX15Z2pEBzGcXqkjdLUoW" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAkAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKpwFAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwWCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcYJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAABmNsYW1wWgAAAAIAAAABegAAAAVsaW1pdAMJAABmAAAAAgUAAAABegUAAAAFbGltaXQFAAAABWxpbWl0AwkAAGYAAAACCQEAAAABLQAAAAEFAAAABWxpbWl0BQAAAAF6CQEAAAABLQAAAAEFAAAABWxpbWl0BQAAAAF6AQAAAApleHBfYXBwcm94AAAAAQAAAAF4BAAAAAhzY2FsZWRfeAkAAGkAAAACBQAAAAF4AAAAAAAAACcQBAAAAAlzY2FsZWRfeDIJAABrAAAAAwUAAAAIc2NhbGVkX3gFAAAACHNjYWxlZF94AAAAAAAAAAABBAAAAAlzY2FsZWRfeDMJAABrAAAAAwUAAAAJc2NhbGVkX3gyBQAAAAhzY2FsZWRfeAAAAAAAAAAAAQQAAAAKZXhwX3Jlc3VsdAkAAGUAAAACCQAAZAAAAAIJAABlAAAAAgAAAAAAAA9CQAUAAAAIc2NhbGVkX3gJAQAAAAhmcmFjdGlvbgAAAAQFAAAACXNjYWxlZF94MgAAAAAAAAehIAAAAAAAAAAAAQUAAAAERE9XTgkBAAAACGZyYWN0aW9uAAAABAUAAAAJc2NhbGVkX3gzAAAAAAAAW42AAAAAAAAAAAABBQAAAARET1dOAwkAAGYAAAACAAAAAAAAAAAABQAAAAF4CQAAZAAAAAIAAAAAAAAPQkAFAAAACmV4cF9yZXN1bHQJAABlAAAAAgAAAAAAAA9CQAUAAAAKZXhwX3Jlc3VsdAEAAAAHc2lnbW9pZAAAAAIAAAABegAAAAtkZWJ1Z1ByZWZpeAQAAAAIY2xhbXBlZFoJAQAAAAZjbGFtcFoAAAACBQAAAAF6AAAAAAAAAYagBAAAAAlwb3NpdGl2ZVoDCQAAZgAAAAIAAAAAAAAAAAAFAAAAAXoJAQAAAAEtAAAAAQUAAAABegUAAAABegQAAAAIZXhwVmFsdWUJAQAAAApleHBfYXBwcm94AAAAAQkBAAAAAS0AAAABBQAAAAlwb3NpdGl2ZVoEAAAACHNpZ1ZhbHVlCQAAawAAAAMAAAAAAAAPQkAJAABkAAAAAgAAAAAAAA9CQAUAAAAIZXhwVmFsdWUAAAAAAAAAAAEJAAUUAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAGaW5wdXRaBQAAAAF6CQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhjbGFtcGVkWgUAAAAIY2xhbXBlZFoJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACXBvc2l0aXZlWgUAAAAJcG9zaXRpdmVaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhleHBWYWx1ZQUAAAAIZXhwVmFsdWUJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACHNpZ1ZhbHVlBQAAAAhzaWdWYWx1ZQUAAAADbmlsBQAAAAhzaWdWYWx1ZQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAAkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQkAAGgAAAACCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAAAAAAAAAGGoAQAAAAEc3VtMQkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAEAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAEAAAAAAAAAAAEJAABoAAAAAgkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAEAAAAAAAABhqAEAAAACyR0MDE5ODAyMDMzCQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTACAAAACExheWVyMU4wBAAAAA1kZWJ1Z0VudHJpZXMwCAUAAAALJHQwMTk4MDIwMzMAAAACXzEEAAAABHNpZzAIBQAAAAskdDAxOTgwMjAzMwAAAAJfMgQAAAALJHQwMjAzODIwOTEJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMQIAAAAITGF5ZXIxTjEEAAAADWRlYnVnRW50cmllczEIBQAAAAskdDAyMDM4MjA5MQAAAAJfMQQAAAAEc2lnMQgFAAAACyR0MDIwMzg
yMDkxAAAAAl8yBAAAAAlkZWJ1Z0luZm8JAAROAAAAAgUAAAANZGVidWdFbnRyaWVzMAUAAAANZGVidWdFbnRyaWVzMQQAAAAGb3V0cHV0CQAETAAAAAIFAAAABHNpZzAJAARMAAAAAgUAAAAEc2lnMQUAAAADbmlsCQAFFAAAAAIFAAAACWRlYnVnSW5mbwUAAAAGb3V0cHV0AQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAQAAAAFaW5wdXQAAAAHd2VpZ2h0cwAAAAZiaWFzZXMAAAALZGVidWdQcmVmaXgEAAAABHN1bTAJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAABCQAAaAAAAAIJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAAAAAAAAAAAYagBAAAAAskdDAyNDAxMjQ1NAkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjJOMAQAAAANZGVidWdFbnRyaWVzMAgFAAAACyR0MDI0MDEyNDU0AAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMjQwMTI0NTQAAAACXzIEAAAACWRlYnVnSW5mbwUAAAANZGVidWdFbnRyaWVzMAQAAAAGb3V0cHV0BQAAAARzaWcwCQAFFAAAAAIFAAAACWRlYnVnSW5mbwUAAAAGb3V0cHV0AAAAAQAAAAFpAQAAAAdwcmVkaWN0AAAAAgAAAAZpbnB1dDEAAAAGaW5wdXQyBAAAAAxzY2FsZWRJbnB1dDEDCQAAAAAAAAIFAAAABmlucHV0MQAAAAAAAAAAAQAAAAAAAA9CQAAAAAAAAAAAAAQAAAAMc2NhbGVkSW5wdXQyAwkAAAAAAAACBQAAAAZpbnB1dDIAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAABmlucHV0cwkABEwAAAACBQAAAAxzY2FsZWRJbnB1dDEJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQyBQAAAANuaWwEAAAACyR0MDI3NjYyODY0CQEAAAARZm9yd2FyZFBhc3NMYXllcjEAAAAEBQAAAAZpbnB1dHMFAAAADWxheWVyMVdlaWdodHMFAAAADGxheWVyMUJpYXNlcwIAAAAGTGF5ZXIxBAAAAAtkZWJ1Z0xheWVyMQgFAAAACyR0MDI3NjYyODY0AAAAAl8xBAAAAAxsYXllcjFPdXRwdXQIBQAAAAskdDAyNzY2Mjg2NAAAAAJfMgQAAAALJHQwMjg2OTI5NzMJAQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAQFAAAADGxheWVyMU91dHB1dAUAAAANbGF5ZXIyV2VpZ2h0cwUAAAAMbGF5ZXIyQmlhc2VzAgAAAAZMYXllcjIEAAAAC2RlYnVnTGF5ZXIyCAUAAAALJHQwMjg2OTI5NzMAAAACXzEEAAAADGxheWVyMk91dHB1dAgFAAAACyR0MDI4NjkyOTczAAAAAl8yCQAETgAAAAIJAAROAAAAAgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgIAAAAGcmVzdWx0BQAAAAxsYXllcjJPdXRwdXQFAAAAA25pbAUAAAALZGVidWdMYXllcjEFAAAAC2RlYnVnTGF5ZXIyAAAAAFCIlZ0=", "height": 3082613, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: Yxm27VmnSiXh83CHjo1DgvTCSkak33fbPnBnSF6qgCS Next: BQxK6Q9RxFkuvKPme7hUqZ728mZQD9T4F4MFJRiKaPvb Diff:
 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600733], [414197, 414253]]
+let layer1Weights = [[600497, 600732], [414197, 414253]]

 let layer1Biases = [-259050, -635637]

-let layer2Weights = [[832965, -897142]]
+let layer2Weights = [[832966, -897142]]

 let layer2Biases = [-381179]

...


 func exp_approx (x) = {
-    let base = 1000000
-    let maxExp = 200000
-    if ((-(maxExp) > x))
-        then 0
-        else if ((x > maxExp))
-            then (base * base)
-            else (base / (1 + (x / 10000)))
+    let scaled_x = (x / 10000)
+    let scaled_x2 = fraction(scaled_x, scaled_x, 1)
+    let scaled_x3 = fraction(scaled_x2, scaled_x, 1)
+    let exp_result = (((1000000 - scaled_x) + fraction(scaled_x2, 500000, 1, DOWN)) - fraction(scaled_x3, 6000000, 1, DOWN))
+    if ((0 > x))
+        then (1000000 + exp_result)
+        else (1000000 - exp_result)
     }


...
         then -(z)
         else z
     let expValue = exp_approx(-(positiveZ))
-    let sigValue = (1000000 / (1000000 + expValue))
-    $Tuple2([IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
+    let sigValue = fraction(1000000, (1000000 + expValue), 1)
+    $Tuple2([IntegerEntry((debugPrefix + "inputZ"), z), IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
     }


 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
     let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t015381591 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t015381591._1
-    let sig0 = $t015381591._2
-    let $t015961649 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t015961649._1
-    let sig1 = $t015961649._2
+    let $t019802033 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t019802033._1
+    let sig0 = $t019802033._2
+    let $t020382091 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t020382091._1
+    let sig1 = $t020382091._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
...

 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t019592012 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t019592012._1
-    let sig0 = $t019592012._2
+    let $t024012454 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t024012454._1
+    let sig0 = $t024012454._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
...
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t023242422 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t023242422._1
-    let layer1Output = $t023242422._2
-    let $t024272531 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t024272531._1
-    let layer2Output = $t024272531._2
+    let $t027662864 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t027662864._1
+    let layer1Output = $t027662864._2
+    let $t028692973 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t028692973._1
+    let layer2Output = $t028692973._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
     }

Full:
 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600497, 600733], [414197, 414253]]
+let layer1Weights = [[600497, 600732], [414197, 414253]]

 let layer1Biases = [-259050, -635637]

-let layer2Weights = [[832965, -897142]]
+let layer2Weights = [[832966, -897142]]

 let layer2Biases = [-381179]

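The parameters above are plain integers; the arithmetic further down compares sums against a 1000000 base and multiplies biases by 100000, so they read naturally as fixed-point values. A minimal off-chain sketch in Python, assuming a 10^6 scale (the SCALE constant and variable names are illustrative, not part of the contract):

    SCALE = 1_000_000  # assumed fixed-point base, matching the 1000000 constants used below

    layer1_weights = [[600497, 600732], [414197, 414253]]
    layer1_biases  = [-259050, -635637]
    layer2_weights = [[832966, -897142]]
    layer2_biases  = [-381179]

    # Rough real-valued reading of the parameters under the assumed scale.
    print([[w / SCALE for w in row] for row in layer1_weights])  # ~[[0.600, 0.601], [0.414, 0.414]]
    print([b / SCALE for b in layer1_biases])                    # ~[-0.259, -0.636]
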
 func clampZ (z,limit) = if ((z > limit))
     then limit
     else if ((-(limit) > z))
         then -(limit)
         else z


 func exp_approx (x) = {
-    let base = 1000000
-    let maxExp = 200000
-    if ((-(maxExp) > x))
-        then 0
-        else if ((x > maxExp))
-            then (base * base)
-            else (base / (1 + (x / 10000)))
+    let scaled_x = (x / 10000)
+    let scaled_x2 = fraction(scaled_x, scaled_x, 1)
+    let scaled_x3 = fraction(scaled_x2, scaled_x, 1)
+    let exp_result = (((1000000 - scaled_x) + fraction(scaled_x2, 500000, 1, DOWN)) - fraction(scaled_x3, 6000000, 1, DOWN))
+    if ((0 > x))
+        then (1000000 + exp_result)
+        else (1000000 - exp_result)
     }
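The old exp_approx returned fixed values outside ±maxExp and (base / (1 + (x / 10000))) otherwise; the new one builds a truncated power series over scaled_x = x / 10000. Below is a direct off-chain transcription in Python for experimenting with inputs; it is a sketch, not the contract itself, and it assumes Ride's integer division and the DOWN rounding of fraction can be mapped to Python floor division:

    def exp_approx(x):
        # Transcription of the new on-chain exp_approx (fixed-point, 1000000 base).
        # Assumption: Ride's `/` behaves like floor division for the inputs used here.
        scaled_x = x // 10_000
        scaled_x2 = scaled_x * scaled_x    # fraction(scaled_x, scaled_x, 1)
        scaled_x3 = scaled_x2 * scaled_x   # fraction(scaled_x2, scaled_x, 1)
        exp_result = (1_000_000 - scaled_x) + scaled_x2 * 500_000 - scaled_x3 * 6_000_000
        return 1_000_000 + exp_result if x < 0 else 1_000_000 - exp_result
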


 func sigmoid (z,debugPrefix) = {
     let clampedZ = clampZ(z, 100000)
     let positiveZ = if ((0 > z))
         then -(z)
         else z
     let expValue = exp_approx(-(positiveZ))
-    let sigValue = (1000000 / (1000000 + expValue))
-    $Tuple2([IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
+    let sigValue = fraction(1000000, (1000000 + expValue), 1)
+    $Tuple2([IntegerEntry((debugPrefix + "inputZ"), z), IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
     }
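sigmoid clamps z, feeds the negated magnitude into exp_approx, and then forms sigValue. Note the change visible in the diff: the old line divided, (1000000 / (1000000 + expValue)), while the new one calls fraction(1000000, (1000000 + expValue), 1), which multiplies. A hedged Python transcription, reusing the exp_approx sketch above (names are illustrative):

    def clamp_z(z, limit):
        # clampZ from the script
        if z > limit:
            return limit
        if -limit > z:
            return -limit
        return z

    def sigmoid(z):
        clamped_z = clamp_z(z, 100_000)   # computed and logged on-chain, but not used below, as in the script
        positive_z = -z if z < 0 else z
        exp_value = exp_approx(-positive_z)
        # New version: fraction(1000000, 1000000 + expValue, 1) == 1_000_000 * (1_000_000 + exp_value).
        # Old version, for comparison: 1_000_000 // (1_000_000 + exp_value).
        return 1_000_000 * (1_000_000 + exp_value)
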


 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
     let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t015381591 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t015381591._1
-    let sig0 = $t015381591._2
-    let $t015961649 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t015961649._1
-    let sig1 = $t015961649._2
+    let $t019802033 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t019802033._1
+    let sig0 = $t019802033._2
+    let $t020382091 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t020382091._1
+    let sig1 = $t020382091._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
     }
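Each layer-1 neuron is a weighted sum of the two inputs plus biases[i] * 100000, pushed through sigmoid. An off-chain sketch that mirrors the sums but drops the IntegerEntry debug plumbing (it relies on the sigmoid sketch above):

    def forward_pass_layer1(inputs, weights, biases):
        # Mirrors forwardPassLayer1 without the debug entries.
        sum0 = inputs[0] * weights[0][0] + inputs[1] * weights[0][1] + biases[0] * 100_000
        sum1 = inputs[0] * weights[1][0] + inputs[1] * weights[1][1] + biases[1] * 100_000
        return [sigmoid(sum0), sigmoid(sum1)]
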


 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t019592012 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t019592012._1
-    let sig0 = $t019592012._2
+    let $t024012454 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t024012454._1
+    let sig0 = $t024012454._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
     }
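forwardPassLayer2 follows the same pattern with a single neuron; the corresponding sketch:

    def forward_pass_layer2(inputs, weights, biases):
        # Mirrors forwardPassLayer2 without the debug entries.
        sum0 = inputs[0] * weights[0][0] + inputs[1] * weights[0][1] + biases[0] * 100_000
        return sigmoid(sum0)
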


 @Callable(i)
 func predict (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t023242422 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t023242422._1
-    let layer1Output = $t023242422._2
-    let $t024272531 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t024272531._1
-    let layer2Output = $t024272531._2
+    let $t027662864 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t027662864._1
+    let layer1Output = $t027662864._2
+    let $t028692973 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t028692973._1
+    let layer2Output = $t028692973._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
     }

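predict maps each argument to 0 or 1000000, runs both layers, and writes the single layer-2 output under the "result" key next to the per-neuron debug entries. An end-to-end off-chain sketch built from the pieces above; on-chain, the same entry point would be reached with an InvokeScript call to predict on the testnet account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY (chainId 84):

    def predict(input1, input2):
        # Mirrors the @Callable predict; returns the value the script stores as "result".
        scaled = [1_000_000 if input1 == 1 else 0,
                  1_000_000 if input2 == 1 else 0]
        layer1_out = forward_pass_layer1(scaled, layer1_weights, layer1_biases)
        return forward_pass_layer2(layer1_out, layer2_weights, layer2_biases)

    # All four binary input combinations.
    for a in (0, 1):
        for b in (0, 1):
            print(a, b, predict(a, b))
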
