tx · 8Tiq9PrpLGFMXBGGABe7MVibryxyyrcnbvTnKbFxbEWk

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.28 14:08 [3082637] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "8Tiq9PrpLGFMXBGGABe7MVibryxyyrcnbvTnKbFxbEWk", "fee": 1000000, "feeAssetId": null, "timestamp": 1714302541761, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "4wJ6pTXkDpSVRGqGJThxPwgwSkVwvm69FdSu1Z4KZG4TNQWj3EF4idHh3kSP7evTNx8zvt9rcAPbFA4UF7oqrTJP" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAkAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmwCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwVCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcYJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAABmNsYW1wWgAAAAIAAAABegAAAAVsaW1pdAMJAABmAAAAAgUAAAABegUAAAAFbGltaXQFAAAABWxpbWl0AwkAAGYAAAACCQEAAAABLQAAAAEFAAAABWxpbWl0BQAAAAF6CQEAAAABLQAAAAEFAAAABWxpbWl0BQAAAAF6AQAAAApleHBfYXBwcm94AAAAAQAAAAF4BAAAAAVhYnNfeAMJAABmAAAAAgAAAAAAAAAAAAUAAAABeAkBAAAAAS0AAAABBQAAAAF4BQAAAAF4BAAAAAphZGp1c3RlZF94AwkAAGYAAAACBQAAAAVhYnNfeAAAAAAAAAGGoAAAAAAAAAGGoAUAAAAFYWJzX3gEAAAACHNjYWxlZF94CQAAaQAAAAIFAAAACmFkanVzdGVkX3gAAAAAAAAAA+gJAABlAAAAAgAAAAAAAAGGoAkAAGgAAAACAAAAAAAAAAAKBQAAAAhzY2FsZWRfeAEAAAAHc2lnbW9pZAAAAAIAAAABegAAAAtkZWJ1Z1ByZWZpeAQAAAAIY2xhbXBlZFoJAQAAAAZjbGFtcFoAAAACBQAAAAF6AAAAAAAAAYagBAAAAAhleHBWYWx1ZQkBAAAACmV4cF9hcHByb3gAAAABCQEAAAABLQAAAAEFAAAACGNsYW1wZWRaBAAAAAhzaWdWYWx1ZQkBAAAACGZyYWN0aW9uAAAABAAAAAAAAA9CQAkAAGQAAAACAAAAAAAAD0JABQAAAAhleHBWYWx1ZQAAAAAAAAAAAQUAAAAERE9XTgkABRQAAAACCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAZpbnB1dFoFAAAAAXoJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACGNsYW1wZWRaBQAAAAhjbGFtcGVkWgkABEwAAAACCQEAAAAMSW50ZWdlckVudHJ5AAAAAgkAASwAAAACBQAAAAtkZWJ1Z1ByZWZpeAIAAAAIZXhwVmFsdWUFAAAACGV4cFZhbHVlCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAJAABoAAAAAgkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEJAABoAAAAAgkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAAAAAAAAAABhqAEAAAABHN1bTEJAABkAAAAAgkAAGQAAAACCQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAAACQAAaAAAAAIJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAABAAAAAAAAAAABCQAAaAAAAAIJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAABAAAAAAAAAYagBAAAAAskdDAxODU2MTkwOQkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0wAgAAAAhMYXllcjFOMAQAAAANZGVidWdFbnRyaWVzMAgFAAAACyR0MDE4NTYxOTA5AAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTg1NjE5MDkAAAACXzIEAAAACyR0MDE5MTQxOTY3CQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTECAAAACExheWVyMU4xBAAAAA1kZWJ1Z0VudHJpZXMxCAUAAAALJHQwMTkxNDE5NjcAAAACXzEEAAAABHNpZzEIBQAAAAskdDAxOTE0MTk2NwAAAAJfMgQAAAAJZGVidWdJbmZvCQAETgAAAAIFAAAADWRlYnVnRW50cmllczAFAAAADWRlYnVnRW50cmllczEEAAAABm91dHB1dAkABEwAAAACBQAAAARzaWcwCQAETAAAAAIFAAAABHNpZzEFAAAAA25pbAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEAAAABWlucHV0AAAAB3dlaWdodHMAAAAGYmlhc2VzAAAAC2RlYnVnUHJlZml4BAAAAARzdW0wCQAAZAAAAAIJAABkAAAAAgkAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAA
kAAGgAAAACCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAAAAAAAAAAAAAQkAAGgAAAACCQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAAAAAAAAAGGoAQAAAALJHQwMjI3NzIzMzAJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIyTjAEAAAADWRlYnVnRW50cmllczAIBQAAAAskdDAyMjc3MjMzMAAAAAJfMQQAAAAEc2lnMAgFAAAACyR0MDIyNzcyMzMwAAAAAl8yBAAAAAlkZWJ1Z0luZm8FAAAADWRlYnVnRW50cmllczAEAAAABm91dHB1dAUAAAAEc2lnMAkABRQAAAACBQAAAAlkZWJ1Z0luZm8FAAAABm91dHB1dAAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAZpbnB1dHMJAARMAAAAAgUAAAAMc2NhbGVkSW5wdXQxCQAETAAAAAIFAAAADHNjYWxlZElucHV0MgUAAAADbmlsBAAAAAskdDAyNjQyMjc0MAkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAABkxheWVyMQQAAAALZGVidWdMYXllcjEIBQAAAAskdDAyNjQyMjc0MAAAAAJfMQQAAAAMbGF5ZXIxT3V0cHV0CAUAAAALJHQwMjY0MjI3NDAAAAACXzIEAAAACyR0MDI3NDUyODQ5CQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwIAAAAGTGF5ZXIyBAAAAAtkZWJ1Z0xheWVyMggFAAAACyR0MDI3NDUyODQ5AAAAAl8xBAAAAAxsYXllcjJPdXRwdXQIBQAAAAskdDAyNzQ1Mjg0OQAAAAJfMgkABE4AAAACCQAETgAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAUAAAAMbGF5ZXIyT3V0cHV0BQAAAANuaWwFAAAAC2RlYnVnTGF5ZXIxBQAAAAtkZWJ1Z0xheWVyMgAAAACK0ZeQ", "height": 3082637, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: 5K3SesFjoRF1HkRqSncDmVU95ew5cKw5DxSrJdSM3sXu Next: GtG7mCapTmYceaRkKY1vhdnd2yLu3vXtwL6gnTomt2N9 Diff:
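This is a version-2 SetScript transaction (type 13) on the Waves testnet (chainId 84, i.e. 'T'): for a 0.01 WAVES fee the sender replaces its own account script with the dApp shown decompiled below. A minimal sketch for pulling the same record from a node's REST endpoint GET /transactions/info/{id} (the node URL is an assumption, not taken from this page):

    import json
    import urllib.request

    TX_ID = "8Tiq9PrpLGFMXBGGABe7MVibryxyyrcnbvTnKbFxbEWk"
    NODE = "https://nodes-testnet.wavesnodes.com"  # assumed public testnet node

    with urllib.request.urlopen(f"{NODE}/transactions/info/{TX_ID}") as resp:
        tx = json.load(resp)

    print(tx["type"], tx["sender"], tx["height"])  # expect 13, the dApp address, 3082637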
Full diff of the account script (old → new):

 {-# STDLIB_VERSION 5 #-}
 {-# SCRIPT_TYPE ACCOUNT #-}
 {-# CONTENT_TYPE DAPP #-}
-let layer1Weights = [[600496, 600733], [414197, 414252]]
+let layer1Weights = [[600496, 600733], [414197, 414253]]

-let layer1Biases = [-259050, -635637]
+let layer1Biases = [-259051, -635637]

-let layer2Weights = [[832965, -897142]]
+let layer2Weights = [[832966, -897142]]

 let layer2Biases = [-381179]
 func clampZ (z,limit) = if ((z > limit))
     then limit
     else if ((-(limit) > z))
         then -(limit)
         else z


 func exp_approx (x) = {
-    let maxExp = 100000
-    if ((-(maxExp) > x))
-        then 1
-        else if ((x > maxExp))
-            then 1000000000
-            else {
-                let scaled_x = (x / 10000)
-                let scaled_x2 = fraction(scaled_x, scaled_x, 10000, DOWN)
-                let exp_result = ((10000 - scaled_x) + (scaled_x2 / 2))
-                (10000 - exp_result)
-                }
+    let abs_x = if ((0 > x))
+        then -(x)
+        else x
+    let adjusted_x = if ((abs_x > 100000))
+        then 100000
+        else abs_x
+    let scaled_x = (adjusted_x / 1000)
+    (100000 - (10 * scaled_x))
     }
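The new exp_approx drops the old quadratic expansion and returns a clamped linear approximation on the contract's integer scale: |x| is capped at 100000 and the result is 100000 - 10 * (|x| / 1000). A minimal off-chain sketch of the same integer arithmetic (Python is used here only as a reading aid for the Ride; it is not part of the contract):

    def exp_approx(x: int) -> int:
        # Mirrors the deployed Ride: abs, cap at 100000, integer-divide by 1000, scale.
        abs_x = -x if x < 0 else x
        adjusted_x = 100000 if abs_x > 100000 else abs_x
        scaled_x = adjusted_x // 1000   # Ride's '/' on non-negative ints behaves like //
        return 100000 - 10 * scaled_x

    # exp_approx(0) == 100000, exp_approx(-100000) == 99000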


 func sigmoid (z,debugPrefix) = {
     let clampedZ = clampZ(z, 100000)
     let expValue = exp_approx(-(clampedZ))
-    let sigValue = fraction(10000, (10000 + expValue), 1, DOWN)
-    $Tuple2([IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
+    let sigValue = fraction(1000000, (1000000 + expValue), 1, DOWN)
+    $Tuple2([IntegerEntry((debugPrefix + "inputZ"), z), IntegerEntry((debugPrefix + "clampedZ"), clampedZ), IntegerEntry((debugPrefix + "expValue"), expValue), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
     }
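As written, sigmoid clamps z to ±100000, feeds -z through exp_approx, and then evaluates fraction(1000000, 1000000 + expValue, 1, DOWN), i.e. 1000000 * (1000000 + expValue) divided by 1, alongside four debug entries keyed by the prefix. A self-contained sketch of exactly that arithmetic (the Python dict merely stands in for the list of IntegerEntry debug writes):

    def clamp_z(z: int, limit: int) -> int:
        return limit if z > limit else (-limit if -limit > z else z)

    def exp_approx(x: int) -> int:
        abs_x = -x if x < 0 else x
        return 100000 - 10 * (min(abs_x, 100000) // 1000)

    def sigmoid(z: int, debug_prefix: str):
        clamped_z = clamp_z(z, 100000)
        exp_value = exp_approx(-clamped_z)
        # fraction(1000000, 1000000 + expValue, 1, DOWN): multiply, then divide by 1
        sig_value = 1000000 * (1000000 + exp_value)
        debug = {
            debug_prefix + "inputZ": z,
            debug_prefix + "clampedZ": clamped_z,
            debug_prefix + "expValue": exp_value,
            debug_prefix + "sigValue": sig_value,
        }
        return debug, sig_value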


 func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
     let sum1 = (((input[0] * weights[1][0]) + (input[1] * weights[1][1])) + (biases[1] * 100000))
-    let $t017651818 = sigmoid(sum0, "Layer1N0")
-    let debugEntries0 = $t017651818._1
-    let sig0 = $t017651818._2
-    let $t018231876 = sigmoid(sum1, "Layer1N1")
-    let debugEntries1 = $t018231876._1
-    let sig1 = $t018231876._2
+    let $t018561909 = sigmoid(sum0, "Layer1N0")
+    let debugEntries0 = $t018561909._1
+    let sig0 = $t018561909._2
+    let $t019141967 = sigmoid(sum1, "Layer1N1")
+    let debugEntries1 = $t019141967._1
+    let sig1 = $t019141967._2
     let debugInfo = (debugEntries0 ++ debugEntries1)
     let output = [sig0, sig1]
     $Tuple2(debugInfo, output)
     }
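Each layer-1 neuron is a two-input weighted sum whose bias is rescaled by 100000 before going into sigmoid. A short sketch of that sum using the updated constants (the helper name and the example input are illustrative only):

    def weighted_sum(inputs, weights, bias):
        # input[0]*w[0] + input[1]*w[1] + bias*100000, as in forwardPassLayer1/2
        return inputs[0] * weights[0] + inputs[1] * weights[1] + bias * 100000

    layer1_weights = [[600496, 600733], [414197, 414253]]
    layer1_biases = [-259051, -635637]

    # Inputs that predict() would build for (input1, input2) = (1, 1)
    inputs = [1000000, 1000000]
    sum0 = weighted_sum(inputs, layer1_weights[0], layer1_biases[0])
    sum1 = weighted_sum(inputs, layer1_weights[1], layer1_biases[1])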


 func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
     let sum0 = (((input[0] * weights[0][0]) + (input[1] * weights[0][1])) + (biases[0] * 100000))
-    let $t021862239 = sigmoid(sum0, "Layer2N0")
-    let debugEntries0 = $t021862239._1
-    let sig0 = $t021862239._2
+    let $t022772330 = sigmoid(sum0, "Layer2N0")
+    let debugEntries0 = $t022772330._1
+    let sig0 = $t022772330._2
     let debugInfo = debugEntries0
     let output = sig0
     $Tuple2(debugInfo, output)
     }


 @Callable(i)
 func predict (input1,input2) = {
     let scaledInput1 = if ((input1 == 1))
         then 1000000
         else 0
     let scaledInput2 = if ((input2 == 1))
         then 1000000
         else 0
     let inputs = [scaledInput1, scaledInput2]
-    let $t025512649 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let debugLayer1 = $t025512649._1
-    let layer1Output = $t025512649._2
-    let $t026542758 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let debugLayer2 = $t026542758._1
-    let layer2Output = $t026542758._2
+    let $t026422740 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let debugLayer1 = $t026422740._1
+    let layer1Output = $t026422740._2
+    let $t027452849 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let debugLayer2 = $t027452849._1
+    let layer2Output = $t027452849._2
     (([IntegerEntry("result", layer2Output)] ++ debugLayer1) ++ debugLayer2)
     }
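Putting the pieces together, a self-contained off-chain transliteration of the predict path, so the value the contract writes under the "result" key can be reproduced without invoking the dApp (a reference sketch, assuming Ride's integer operations behave like Python's on the non-negative intermediates seen here; names are illustrative):

    LAYER1_WEIGHTS = [[600496, 600733], [414197, 414253]]
    LAYER1_BIASES = [-259051, -635637]
    LAYER2_WEIGHTS = [[832966, -897142]]
    LAYER2_BIASES = [-381179]

    def clamp_z(z, limit):
        return limit if z > limit else (-limit if -limit > z else z)

    def exp_approx(x):
        abs_x = -x if x < 0 else x
        return 100000 - 10 * (min(abs_x, 100000) // 1000)

    def sigmoid(z):
        # fraction(1000000, 1000000 + expValue, 1, DOWN)
        return 1000000 * (1000000 + exp_approx(-clamp_z(z, 100000)))

    def neuron(inputs, weights, bias):
        # input[0]*w[0] + input[1]*w[1] + bias*100000
        return sum(i * w for i, w in zip(inputs, weights)) + bias * 100000

    def predict(input1, input2):
        inputs = [1000000 if input1 == 1 else 0, 1000000 if input2 == 1 else 0]
        layer1 = [sigmoid(neuron(inputs, w, b))
                  for w, b in zip(LAYER1_WEIGHTS, LAYER1_BIASES)]
        return sigmoid(neuron(layer1, LAYER2_WEIGHTS[0], LAYER2_BIASES[0]))

    if __name__ == "__main__":
        for a in (0, 1):
            for b in (0, 1):
                print(a, b, predict(a, b))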
