tx · CS21i4va4bE4wpJLLVcMuLnJmUSs6ayZzKEjCtJzd3f3

3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY:  -0.01000000 Waves

2024.04.15 23:00 [3064333] smart account 3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY > SELF 0.00000000 Waves

{ "type": 13, "id": "CS21i4va4bE4wpJLLVcMuLnJmUSs6ayZzKEjCtJzd3f3", "fee": 1000000, "feeAssetId": null, "timestamp": 1713211229821, "version": 2, "chainId": 84, "sender": "3N3n75UqB8G1GKmXFr4zPhKCjGcqJPRSuJY", "senderPublicKey": "2AWdnJuBMzufXSjTvzVcawBQQhnhF1iXR6QNVgwn33oc", "proofs": [ "33ijew3F8tZZB8zxehKgwkJDBSrdxVQ1TsTrbr4pfietpGz5BVH5QV6kTxWMrGiWb2VKdWspQEAFwTXjj4igx2cE" ], "script": "base64:AAIFAAAAAAAAAAgIAhIECgIBAQAAAAcAAAAADWxheWVyMVdlaWdodHMJAARMAAAAAgkABEwAAAACAAAAAAAACSmxCQAETAAAAAIAAAAAAAAJKp0FAAAAA25pbAkABEwAAAACCQAETAAAAAIAAAAAAAAGUfUJAARMAAAAAgAAAAAAAAZSLQUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMUJpYXNlcwkABEwAAAACAP///////AwWCQAETAAAAAIA///////2TQsFAAAAA25pbAAAAAANbGF5ZXIyV2VpZ2h0cwkABEwAAAACCQAETAAAAAIAAAAAAAAMtcYJAARMAAAAAgD///////JPigUAAAADbmlsBQAAAANuaWwAAAAADGxheWVyMkJpYXNlcwkABEwAAAACAP//////+i8FBQAAAANuaWwBAAAAB3NpZ21vaWQAAAACAAAAAXoAAAALZGVidWdQcmVmaXgEAAAAAWUAAAAAAAApekkEAAAABGJhc2UAAAAAAAAPQkAEAAAACXBvc2l0aXZlWgMJAABmAAAAAgAAAAAAAAAAAAUAAAABegkBAAAAAS0AAAABBQAAAAF6BQAAAAF6BAAAAAdleHBQYXJ0CQAAawAAAAMFAAAAAWUFAAAABGJhc2UFAAAACXBvc2l0aXZlWgQAAAAIc2lnVmFsdWUJAABrAAAAAwUAAAAEYmFzZQUAAAAEYmFzZQkAAGQAAAACBQAAAARiYXNlBQAAAAdleHBQYXJ0CQAFFAAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAIJAAEsAAAAAgUAAAALZGVidWdQcmVmaXgCAAAACXBvc2l0aXZlWgUAAAAJcG9zaXRpdmVaCQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAdleHBQYXJ0BQAAAAdleHBQYXJ0CQAETAAAAAIJAQAAAAxJbnRlZ2VyRW50cnkAAAACCQABLAAAAAIFAAAAC2RlYnVnUHJlZml4AgAAAAhzaWdWYWx1ZQUAAAAIc2lnVmFsdWUFAAAAA25pbAUAAAAIc2lnVmFsdWUBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAAAAAVpbnB1dAAAAAd3ZWlnaHRzAAAABmJpYXNlcwAAAAtkZWJ1Z1ByZWZpeAQAAAAEc3VtMAkAAGQAAAACCQAAZAAAAAIJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAAkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPQkAJAABrAAAAAwkAAZEAAAACBQAAAAVpbnB1dAAAAAAAAAAAAQkAAZEAAAACCQABkQAAAAIFAAAAB3dlaWdodHMAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAPQkAJAAGRAAAAAgUAAAAGYmlhc2VzAAAAAAAAAAAABAAAAARzdW0xCQAAZAAAAAIJAABkAAAAAgkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAAACQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAA9CQAkAAGsAAAADCQABkQAAAAIFAAAABWlucHV0AAAAAAAAAAABCQABkQAAAAIJAAGRAAAAAgUAAAAHd2VpZ2h0cwAAAAAAAAAAAQAAAAAAAAAAAQAAAAAAAA9CQAkAAZEAAAACBQAAAAZiaWFzZXMAAAAAAAAAAAEEAAAACyR0MDExNzIxMjE4CQEAAAAHc2lnbW9pZAAAAAIFAAAABHN1bTACAAAACExheWVyMU4wBAAAAAZkZWJ1ZzAIBQAAAAskdDAxMTcyMTIxOAAAAAJfMQQAAAAEc2lnMAgFAAAACyR0MDExNzIxMjE4AAAAAl8yBAAAAAskdDAxMjIzMTI2OQkBAAAAB3NpZ21vaWQAAAACBQAAAARzdW0xAgAAAAhMYXllcjFOMQQAAAAGZGVidWcxCAUAAAALJHQwMTIyMzEyNjkAAAACXzEEAAAABHNpZzEIBQAAAAskdDAxMjIzMTI2OQAAAAJfMgkABRQAAAACCQAETAAAAAIFAAAABHNpZzAJAARMAAAAAgUAAAAEc2lnMQUAAAADbmlsCQAETgAAAAIFAAAABmRlYnVnMAUAAAAGZGVidWcxAQAAABFmb3J3YXJkUGFzc0xheWVyMgAAAAQAAAAFaW5wdXQAAAAHd2VpZ2h0cwAAAAZiaWFzZXMAAAALZGVidWdQcmVmaXgEAAAABHN1bTAJAABkAAAAAgkAAGQAAAACCQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAAJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD0JACQAAawAAAAMJAAGRAAAAAgUAAAAFaW5wdXQAAAAAAAAAAAEJAAGRAAAAAgkAAZEAAAACBQAAAAd3ZWlnaHRzAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAD0JACQABkQAAAAIFAAAABmJpYXNlcwAAAAAAAAAAAAQAAAALJHQwMTUzODE1ODQJAQAAAAdzaWdtb2lkAAAAAgUAAAAEc3VtMAIAAAAITGF5ZXIyTjAEAAAABmRlYnVnMAgFAAAACyR0MDE1MzgxNTg0AAAAAl8xBAAAAARzaWcwCAUAAAALJHQwMTUzODE1ODQAAAACXzIJAAUUAAAAAgkABEwAAAACBQAAAARzaWcwBQAAAANuaWwFAAAABmRlYnVnMAAAAAEAAAABaQEAAAAHcHJlZGljdAAAAAIAAAAGaW5wdXQxAAAABmlucHV0MgQAAAAMc2NhbGVkSW5wdXQxAwkAAAAAAAACBQAAAAZpbnB1dDEAAAAAAAAAAAEAAAAAAAAPQkAAAAAAAAAAAAAEAAAADHNjYWxlZElucHV0MgMJAAAAAAAAAgUAAAAGaW5wdXQyAAAAAAAAAAABAAAAAAAAD0JAAAAAAAAAAAAABAAAAAZpbnB1dHMJAARMAAAAAgUAAA
AMc2NhbGVkSW5wdXQxCQAETAAAAAIFAAAADHNjYWxlZElucHV0MgUAAAADbmlsBAAAAAskdDAxODM3MTkzNQkBAAAAEWZvcndhcmRQYXNzTGF5ZXIxAAAABAUAAAAGaW5wdXRzBQAAAA1sYXllcjFXZWlnaHRzBQAAAAxsYXllcjFCaWFzZXMCAAAABkxheWVyMQQAAAAMbGF5ZXIxT3V0cHV0CAUAAAALJHQwMTgzNzE5MzUAAAACXzEEAAAAC2RlYnVnTGF5ZXIxCAUAAAALJHQwMTgzNzE5MzUAAAACXzIEAAAACyR0MDE5NDAyMDQ0CQEAAAARZm9yd2FyZFBhc3NMYXllcjIAAAAEBQAAAAxsYXllcjFPdXRwdXQFAAAADWxheWVyMldlaWdodHMFAAAADGxheWVyMkJpYXNlcwIAAAAGTGF5ZXIyBAAAAAxsYXllcjJPdXRwdXQIBQAAAAskdDAxOTQwMjA0NAAAAAJfMQQAAAALZGVidWdMYXllcjIIBQAAAAskdDAxOTQwMjA0NAAAAAJfMgkABE4AAAACCQAETgAAAAIJAARMAAAAAgkBAAAADEludGVnZXJFbnRyeQAAAAICAAAABnJlc3VsdAkAAZEAAAACBQAAAAxsYXllcjJPdXRwdXQAAAAAAAAAAAAFAAAAA25pbAUAAAALZGVidWdMYXllcjEFAAAAC2RlYnVnTGF5ZXIyAAAAAOdBDRI=", "height": 3064333, "applicationStatus": "succeeded", "spentComplexity": 0 } View: original | compacted Prev: 7PT7Kpvh7R2ojLzQAxqjD2to4ryUdME3RKSJ8hwMgmYZ Next: M1cUmR4buw3m1ApiJjeGXmKDuNfXDeYV5g723kcmemh Diff:
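The JSON stores every quantity as a plain integer: the fee is in units of 10^-8 WAVES, the timestamp is in milliseconds since the Unix epoch, and chainId is the ASCII code of the network byte. A minimal sketch (Python; standalone arithmetic, not a Waves SDK call) shows how these integers map to the human-readable summary at the top of the page:

from datetime import datetime, timezone

tx = {"fee": 1000000, "timestamp": 1713211229821, "chainId": 84}

fee_waves = tx["fee"] / 10**8                                        # 0.01 WAVES, the amount charged above
signed_at = datetime.fromtimestamp(tx["timestamp"] / 1000, tz=timezone.utc)
network = chr(tx["chainId"])                                         # 'T' is the Waves testnet network byte

print(f"fee: {fee_waves:.8f} WAVES")        # fee: 0.01000000 WAVES
print(f"signed at: {signed_at.isoformat()}")  # 2024-04-15T20:00:29.821000+00:00 (the header shows the block time in local time)
print(f"network: {network}")                # network: T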
Full: decompiled script, shown as a diff against the previous version ('-' marks lines removed, '+' marks lines added)

{-# STDLIB_VERSION 5 #-}
{-# SCRIPT_TYPE ACCOUNT #-}
{-# CONTENT_TYPE DAPP #-}

let layer1Weights = [[600497, 600733], [414197, 414253]]

let layer1Biases = [-259050, -635637]

let layer2Weights = [[832966, -897142]]

let layer2Biases = [-381179]

func sigmoid (z,debugPrefix) = {
    let e = 2718281
    let base = 1000000
    let positiveZ = if ((0 > z))
        then -(z)
        else z
    let expPart = fraction(e, base, positiveZ)
    let sigValue = fraction(base, base, (base + expPart))
    $Tuple2([IntegerEntry((debugPrefix + "positiveZ"), positiveZ), IntegerEntry((debugPrefix + "expPart"), expPart), IntegerEntry((debugPrefix + "sigValue"), sigValue)], sigValue)
}
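Note that this sigmoid is only a rough stand-in for the logistic function: in the 6-decimal fixed point used throughout the script (base = 1000000, e scaled to 2718281), expPart reduces to e/|z| and sigValue to |z|/(|z| + e), and the sign of z is discarded. The sketch below (Python; the function names are illustrative, and Ride's exact rounding mode is assumed rather than verified) reproduces the same integer arithmetic off-chain:

import math

BASE = 1_000_000   # 6-decimal fixed point used by the contract
E = 2_718_281      # Euler's number scaled by BASE

def fraction(a: int, b: int, c: int) -> int:
    # Ride's fraction(a, b, c) = a * b / c with the intermediate product kept exact;
    # Python's // floors, which may differ from Ride's rounding in edge cases.
    return a * b // c

def sigmoid_onchain(z: int) -> int:
    """Mirror of the contract's sigmoid: |z| / (|z| + e), returned in fixed point."""
    positive_z = -z if z < 0 else z
    exp_part = fraction(E, BASE, positive_z)        # ~ e / |z|, scaled by BASE
    return fraction(BASE, BASE, BASE + exp_part)    # ~ |z| / (|z| + e), scaled by BASE

# Layer1N0 pre-activation for input (1, 1): 600497 + 600733 - 259050 = 942180
z = 942180
print(sigmoid_onchain(z) / BASE)          # the on-chain approximation
print(1 / (1 + math.exp(-z / BASE)))      # the true logistic value, for comparison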
func forwardPassLayer1 (input,weights,biases,debugPrefix) = {
    let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
    let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[1], weights[1][1], 1000000)) + biases[1])
    let $t011721218 = sigmoid(sum0, "Layer1N0")
    let debug0 = $t011721218._1
    let sig0 = $t011721218._2
    let $t012231269 = sigmoid(sum1, "Layer1N1")
    let debug1 = $t012231269._1
    let sig1 = $t012231269._2
    $Tuple2([sig0, sig1], (debug0 ++ debug1))
}
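Each hidden neuron in forwardPassLayer1 is a fixed-point dot product: fraction(input[i], weights[n][i], 1000000) rescales the product of two 6-decimal values back to 6 decimals, and the bias is added in the same scale. Continuing the sketch above (Python; the constants are copied from the script, the helper names are illustrative, and fraction/sigmoid_onchain are the functions defined after the sigmoid listing):

LAYER1_WEIGHTS = [[600497, 600733], [414197, 414253]]   # ~0.60/0.60 and ~0.41/0.41
LAYER1_BIASES = [-259050, -635637]                       # ~-0.26 and ~-0.64

def forward_pass_layer1(inputs):
    """Fixed-point equivalent of forwardPassLayer1, without the debug entries."""
    sums = [
        fraction(inputs[0], LAYER1_WEIGHTS[n][0], BASE)
        + fraction(inputs[1], LAYER1_WEIGHTS[n][1], BASE)
        + LAYER1_BIASES[n]
        for n in range(2)
    ]
    return [sigmoid_onchain(s) for s in sums]

print(forward_pass_layer1([BASE, 0]))   # the two hidden activations for input (1, 0)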
func forwardPassLayer2 (input,weights,biases,debugPrefix) = {
-    let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[0], weights[0][1], 1000000)) + biases[0])
-    let sum1 = ((fraction(input[0], weights[1][0], 1000000) + fraction(input[0], weights[1][1], 1000000)) + biases[1])
-    let $t016531699 = sigmoid(sum0, "Layer2N0")
-    let debug0 = $t016531699._1
-    let sig0 = $t016531699._2
-    let $t017041750 = sigmoid(sum1, "Layer2N1")
-    let debug1 = $t017041750._1
-    let sig1 = $t017041750._2
-    $Tuple2([sig0, sig1], (debug0 ++ debug1))
+    let sum0 = ((fraction(input[0], weights[0][0], 1000000) + fraction(input[1], weights[0][1], 1000000)) + biases[0])
+    let $t015381584 = sigmoid(sum0, "Layer2N0")
+    let debug0 = $t015381584._1
+    let sig0 = $t015381584._2
+    $Tuple2([sig0], debug0)
}
@Callable(i)
func predict (input1,input2) = {
    let scaledInput1 = if ((input1 == 1))
        then 1000000
        else 0
    let scaledInput2 = if ((input2 == 1))
        then 1000000
        else 0
    let inputs = [scaledInput1, scaledInput2]
-    let $t020192117 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
-    let layer1Output = $t020192117._1
-    let debugLayer1 = $t020192117._2
-    let $t021222226 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
-    let layer2Output = $t021222226._1
-    let debugLayer2 = $t021222226._2
+    let $t018371935 = forwardPassLayer1(inputs, layer1Weights, layer1Biases, "Layer1")
+    let layer1Output = $t018371935._1
+    let debugLayer1 = $t018371935._2
+    let $t019402044 = forwardPassLayer2(layer1Output, layer2Weights, layer2Biases, "Layer2")
+    let layer2Output = $t019402044._1
+    let debugLayer2 = $t019402044._2
    (([IntegerEntry("result", layer2Output[0])] ++ debugLayer1) ++ debugLayer2)
}
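Putting the pieces together, the callable can be replayed off-chain. The following self-contained sketch (Python; all constants are copied from the decompiled script, the function and variable names are illustrative) runs predict for the four possible (input1, input2) pairs and prints the fixed-point value the contract would write to the "result" entry:

BASE = 1_000_000
E = 2_718_281

LAYER1_WEIGHTS = [[600497, 600733], [414197, 414253]]
LAYER1_BIASES = [-259050, -635637]
LAYER2_WEIGHTS = [[832966, -897142]]
LAYER2_BIASES = [-381179]

def fraction(a, b, c):
    # Ride's fraction(a, b, c); Python's // floors, Ride's rounding may differ slightly.
    return a * b // c

def sigmoid_onchain(z):
    positive_z = -z if z < 0 else z
    return fraction(BASE, BASE, BASE + fraction(E, BASE, positive_z))

def dense(inputs, weights, biases):
    """One fixed-point dense layer followed by the contract's sigmoid."""
    return [
        sigmoid_onchain(
            sum(fraction(inputs[i], w[i], BASE) for i in range(len(inputs))) + b
        )
        for w, b in zip(weights, biases)
    ]

def predict(input1, input2):
    """Off-chain replay of the @Callable predict(input1, input2)."""
    inputs = [BASE if input1 == 1 else 0, BASE if input2 == 1 else 0]
    hidden = dense(inputs, LAYER1_WEIGHTS, LAYER1_BIASES)
    output = dense(hidden, LAYER2_WEIGHTS, LAYER2_BIASES)
    return output[0]   # the value written to the "result" IntegerEntry

for a in (0, 1):
    for b in (0, 1):
        print(f"predict({a}, {b}) -> result = {predict(a, b)}")   # fixed point, scale 10**6

Up to the rounding caveat noted in the comments, these four values should match the "result" entry written by the corresponding on-chain invocations, which makes the sketch a convenient sanity check before sending a transaction.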
