I'm trying to use the exactInput() function of the Uniswap V3 router interface, but the transaction fails when I execute the code: https://goerli.etherscan.io/tx/0xb0d5e4b491610b9db8d98cc938008ba2a4e1a06e67b05ed87ac6c0ca3ad61dab
I know the ETH sent shows 0 in this one, but it fails even when specifying an amount, and I don't know what to change.
I have checked many code samples out there and can't see the mistake. Could someone give me some advice?
const {abi: V3SwapRouterABI} = require('@uniswap/v3-periphery/artifacts/contracts/interfaces/ISwapRouter.sol/ISwapRouter.json')
const { ethers } = require("ethers")
require("dotenv").config()
const INFURA_URL_TESTNET = process.env.INFURA_URL_TESTNET
const PRIVATE_KEY = process.env.PRIVATE_KEY
const WALLET_ADDRESS = process.env.WALLET_ADDRESS
// now you can call sendTransaction
const wethToken= "0xB4FBF271143F4FBf7B91A5ded31805e42b2208d6"
const Uni= "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984"
const UniswapRouter="0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45"
const UniV3Contract = new ethers.Contract(
UniswapRouter,
V3SwapRouterABI
)
const provider = new ethers.providers.JsonRpcProvider(INFURA_URL_TESTNET)
const wallet = new ethers.Wallet(PRIVATE_KEY)
const signer = wallet.connect(provider)
const FEE_SIZE = 3
function encodePath(path, fees) {
if (path.length != fees.length + 1) {
throw new Error('path/fee lengths do not match')
}
let encoded = '0x'
for (let i = 0; i < fees.length; i++) {
// 20 byte encoding of the address
encoded += path[i].slice(2)
// 3 byte encoding of the fee
encoded += fees[i].toString(16).padStart(2 * FEE_SIZE, '0')
}
// encode the final token
encoded += path[path.length - 1].slice(2)
return encoded.toLowerCase()
}
async function getToken() {
const path = encodePath([wethToken, Uni], [3000])
const deadline = Math.floor(Date.now()/1000) + (60*10)
const params = {
path: path,
recipient: WALLET_ADDRESS,
deadline: deadline,
amountIn: ethers.utils.parseEther('0.01'),
amountOutMinimum: 0
}
const encodedData = UniV3Contract.interface.encodeFunctionData("exactInput", [params])
const txArg = {
to: UniswapRouter,
from: WALLET_ADDRESS,
data: encodedData,
gasLimit: ethers.utils.hexlify(1000000)
}
const tx = await signer.sendTransaction(txArg)
console.log('tx: ', tx)
const receipt = await tx.wait()
console.log('receipt: ', receipt)
}
module.exports = { getToken }
You will need to remove the deadline. The new router (0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45) moved the deadline into the multicall function, since the router is designed to be multicall.
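For reference, a minimal sketch of the same swap without the deadline field, assuming you switch to the IV3SwapRouter ABI shipped with @uniswap/swap-router-contracts (the exact artifact path may differ by package version); if you still want a deadline, SwapRouter02 exposes it through multicall(deadline, data) instead:
const { abi: SwapRouter02ABI } = require('@uniswap/swap-router-contracts/artifacts/contracts/interfaces/IV3SwapRouter.sol/IV3SwapRouter.json')
const router = new ethers.Contract(UniswapRouter, SwapRouter02ABI)
async function getTokenNoDeadline() {
  const path = encodePath([wethToken, Uni], [3000])
  // Note: no deadline field here; IV3SwapRouter.ExactInputParams does not have one
  const params = {
    path: path,
    recipient: WALLET_ADDRESS,
    amountIn: ethers.utils.parseEther('0.01'),
    amountOutMinimum: 0
  }
  const encodedData = router.interface.encodeFunctionData('exactInput', [params])
  const txArg = {
    to: UniswapRouter,
    from: WALLET_ADDRESS,
    data: encodedData,
    // if you intend to pay with ETH directly (path starting at WETH), you may also need value: params.amountIn
    gasLimit: ethers.utils.hexlify(1000000)
  }
  const tx = await signer.sendTransaction(txArg)
  const receipt = await tx.wait()
  console.log('receipt: ', receipt)
}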
I am developing a VS Code extension that should search for every color hex code in the whole CSS document and replace each color hex code with a variable name. Although it matches all color hex codes, it replaces only the first instance and then stops. Below is the code snippet:
export function activate(context: vscode.ExtensionContext) {
let activeEditor = vscode.window.activeTextEditor;
function replaceWithinDocument() {
if (!activeEditor) {
return;
}
const text = activeEditor.document.getText();
const reg = new RegExp('(?<color>#[0-9a-f]{3,6})', 'gim');
const matches = text.matchAll(reg);
const variableList = {};
let i = 0;
for (const match of matches) {
const { index, groups } = match;
i++;
console.log({ match });
const startPos = activeEditor.document.positionAt(index!);
const endPos = activeEditor.document.positionAt(index! + match[0].length);
console.log({ i, startPos, endPos });
//Creating a new range with startLine, startCharacter & endLine, endCharacter.
let range = new vscode.Range(startPos, endPos);
// eslint-disable-next-line @typescript-eslint/naming-convention
Object.assign(variableList, { [`--var-${i}`]: groups?.color });
activeEditor.edit(editBuilder => {
editBuilder.replace(range, `--var-${i}`);
});
}
console.log({ variableList });
}
let timeout: NodeJS.Timeout | undefined = undefined;
function triggerUpdateDecorations(throttle = false) {
if (timeout) {
clearTimeout(timeout);
timeout = undefined;
}
if (throttle) {
timeout = setTimeout(replaceWithinDocument, 500);
} else {
replaceWithinDocument();
}
}
if (activeEditor) {
triggerUpdateDecorations();
}
Final document:
body {
background-color: --var-1;
color: #223344;
}
You can see in the screenshot that console.log({ variableList }); has both color codes in it.
So what is wrong here?
See allow delay between edits via vscode extension api. Because of the particular nature of the editBuilder object:
The editBuilder "expires" once you return from the callback passed to TextEditor.edit.
You should put your matches loop inside a single call to edit, like this sample code:
// get your matches above first
editor.edit(editBuilder => {
let i = 0;
for (const match of matches) {
// build your replacement here
const matchStartPos = document.positionAt(match.index);
const matchEndPos = document.positionAt(match.index + match[0].length);
const matchRange = new Range(matchStartPos, matchEndPos);
editBuilder.replace(matchRange, resolvedReplace); // resolvedReplace: the replacement text you computed for this match
}
}).then(async (resolved) => {
});
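Applied to the code in the question, a rough sketch (same regex and names as above; treat it as illustrative rather than a drop-in replacement) could look like this:
function replaceWithinDocument() {
  if (!activeEditor) {
    return;
  }
  const editor = activeEditor;
  const text = editor.document.getText();
  const reg = new RegExp('(?<color>#[0-9a-f]{3,6})', 'gim');
  const matches = [...text.matchAll(reg)];
  const variableList: Record<string, string | undefined> = {};
  editor.edit(editBuilder => {
    let i = 0;
    for (const match of matches) {
      i++;
      const startPos = editor.document.positionAt(match.index!);
      const endPos = editor.document.positionAt(match.index! + match[0].length);
      variableList[`--var-${i}`] = match.groups?.color;
      // every replacement is queued on the same editBuilder, so they all land in one edit
      editBuilder.replace(new vscode.Range(startPos, endPos), `--var-${i}`);
    }
  }).then(applied => {
    console.log({ applied, variableList });
  });
}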
I am processing an audio buffer with an OfflineAudioContext with the following node layout:
[AudioBufferSourceNode] -> [AnalyserNode] -> [OfflineAudioContext]
This works very well on Chrome (106.0.5249.119), but on Safari 16 (17614.1.25.9.10, 17614) each run of the analysis takes longer and longer. Both are running on macOS.
What's curious is that I must quit Safari to "reset" the processing time.
I guess there's a memory leak?
Is there anything that I'm doing wrong in the JavaScript code that would cause Safari to not garbage collect?
async function processFrequencyData(
audioBuffer,
options
) {
const {
fps,
numberOfSamples,
maxDecibels,
minDecibels,
smoothingTimeConstant,
} = options;
const frameFrequencies = [];
const oc = new OfflineAudioContext({
length: audioBuffer.length,
sampleRate: audioBuffer.sampleRate,
numberOfChannels: audioBuffer.numberOfChannels,
});
const lengthInMillis = 1000 * (audioBuffer.length / audioBuffer.sampleRate);
const source = new AudioBufferSourceNode(oc);
source.buffer = audioBuffer;
const az = new AnalyserNode(oc, {
fftSize: numberOfSamples * 2,
smoothingTimeConstant,
minDecibels,
maxDecibels,
});
source.connect(az).connect(oc.destination);
const msPerFrame = 1000 / fps;
let currentFrame = 0;
function process() {
const frequencies = new Uint8Array(az.frequencyBinCount);
az.getByteFrequencyData(frequencies);
// const times = new number[](az.frequencyBinCount);
// az.getByteTimeDomainData(times);
frameFrequencies[currentFrame] = frequencies;
const nextTime = (currentFrame + 1) * msPerFrame;
if (nextTime < lengthInMillis) {
currentFrame++;
const nextTimeSeconds = (currentFrame * msPerFrame) / 1000;
oc.suspend(nextTimeSeconds).then(process);
}
oc.resume();
}
oc.suspend(0).then(process);
source.start(0);
await oc.startRendering();
return frameFrequencies;
}
const buttonsDiv = document.createElement('div');
document.body.appendChild(buttonsDiv);
const initButton = document.createElement('button');
initButton.onclick = init;
initButton.innerHTML = 'Load audio'
buttonsDiv.appendChild(initButton);
const processButton = document.createElement('button');
processButton.disabled = true;
processButton.innerHTML = 'Process'
buttonsDiv.appendChild(processButton);
const resultElement = document.createElement('pre');
document.body.appendChild(resultElement)
async function init() {
initButton.disabled = true;
resultElement.innerText += 'Loading audio... ';
const audioContext = new AudioContext();
const arrayBuffer = await fetch('https://gist.githubusercontent.com/marcusstenbeck/da36a5fc2eeeba14ae9f984a580db1da/raw/84c53582d3936ac78625a31029022c8fdb734b2a/base64audio.txt').then(r => r.text()).then(fetch).then(r => r.arrayBuffer())
resultElement.innerText += 'finished.';
resultElement.innerText += '\nDecoding audio... ';
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
resultElement.innerText += 'finished.';
processButton.onclick = async () => {
processButton.disabled = true;
resultElement.innerText += '\nStart processing... ';
const t0 = Date.now();
await processFrequencyData(audioBuffer, {
fps: 30,
numberOfSamples: 2 ** 13,
maxDecibels: -25,
minDecibels: -70,
smoothingTimeConstant: 0.2,
});
resultElement.innerText += `finished in ${Date.now() - t0} ms`;
processButton.disabled = false;
};
processButton.disabled = false;
}
I guess this is really a bug in Safari. I'm able to reproduce it by rendering an OfflineAudioContext without any nodes. As soon as I use suspend()/resume() every invocation takes a little longer.
I'm only speculating here but I think it's possible that there is some internal mechanism which tries to prevent the rapid back and forth between the audio thread and the main thread. It almost feels like one of those login forms which takes a bit longer to validate the password every time you try.
Anyway I think you can avoid using suspend()/resume() for your particular use case. It should be possible to create an OfflineAudioContext for each of the slices instead. In order to get the same effect you would only render the particular slice with each OfflineAudioContext.
let currentTime = 0;
while (currentTime < duration) {
const offlineAudioContext = new OfflineAudioContext({
length: LENGTH_OF_ONE_SLICE,
sampleRate
});
const audioBufferSourceNode = new AudioBufferSourceNode(
offlineAudioContext,
{
buffer
}
);
const analyserNode = new AnalyserNode(offlineAudioContext);
audioBufferSourceNode.start(0, currentTime);
audioBufferSourceNode
.connect(analyserNode)
.connect(offlineAudioContext.destination);
await offlineAudioContext.startRendering();
const frequencies = new Uint8Array(analyserNode.frequencyBinCount);
analyserNode.getByteFrequencyData(frequencies);
// do something with the frequencies ...
currentTime += LENGTH_OF_ONE_SLICE / sampleRate; // advance by one slice, converted from frames to seconds
}
I think the only thing missing would be the smoothing, since each of those slices will have its own AnalyserNode.
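If the smoothing matters for your use case, one rough workaround (my own sketch, not something the Web Audio API provides) is to blend each slice's byte frequency data with the previous slice's values, similar in spirit to the first-order smoothing an AnalyserNode applies internally. Note the real smoothing operates on linear magnitudes before the decibel conversion, so this is only an approximation:
const smoothingTimeConstant = 0.2;
let previousFrequencies = null;

function smoothFrequencies(frequencies) {
  if (previousFrequencies !== null) {
    for (let i = 0; i < frequencies.length; i++) {
      // first-order low-pass: tau * previous + (1 - tau) * current
      frequencies[i] = Math.round(
        smoothingTimeConstant * previousFrequencies[i] +
        (1 - smoothingTimeConstant) * frequencies[i]
      );
    }
  }
  previousFrequencies = Uint8Array.from(frequencies);
  return frequencies;
}

// inside the while loop, right after analyserNode.getByteFrequencyData(frequencies):
// const smoothedFrequencies = smoothFrequencies(frequencies);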
In my worker I am converting a base64 string I get from the request to a blob with some function. However, when I try to PUT the blob into my bucket, I get a "Network Connection Lost" error. I can successfully PUT just the base64 string or any other string, but not a blob. Here is my worker:
// Function to convert b64 to blob (working fine)
function b64toBlob(b64Data, contentType, sliceSize=512) {
const byteCharacters = atob(b64Data);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob = new Blob(byteArrays, {type: contentType});
return blob;
}
export default {
async fetch(request, env) {
const url = new URL(request.url);
const key = url.pathname.slice(1);
switch (request.method) {
case 'PUT':
const contentType = 'application/pdf';
const b64Data = request.body;
const blob = b64toBlob(b64Data, contentType);
try {
await env.qa_sub_agreements_bucket.put(key, blob, { // Failing here
httpMetadata: request.headers,
})
return new Response(blob) // Successfully returns the blob when above PUT is commented out
} catch (e) {
console.error(e.message, e.stack); // Logs out "Error: Network Connection Lost"
}
Hard to say definitively because the Worker posted doesn't appear to be totally complete. An eagle-eyed coworker spotted that the problem is likely that you're invoking atob on a ReadableStream, and that conversion is what's throwing the exception.
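A minimal sketch of that fix, assuming the client really does send the raw base64 string as the request body: read the stream to completion with request.text() first, then decode it to bytes. R2's put() accepts an ArrayBuffer or typed array directly, so the Blob detour isn't strictly needed; the contentType shown here is an assumption:
case 'PUT': {
  // request.body is a ReadableStream; consume it fully before decoding
  const b64Data = await request.text();
  const bytes = Uint8Array.from(atob(b64Data), (c) => c.charCodeAt(0));
  try {
    await env.qa_sub_agreements_bucket.put(key, bytes, {
      httpMetadata: { contentType: 'application/pdf' },
    });
    return new Response('OK');
  } catch (e) {
    console.error(e.message, e.stack);
    return new Response('Upload failed', { status: 500 });
  }
}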
I am using React Native to build a mobile application for Android and iOS.
Since no framework exists that supports the Azure Storage API for React Native (the existing ones all require a browser environment, which React Native does not have), I use the REST API to interact with Azure Storage, and it works fine for e.g. listing containers, listing blobs, getting a blob and putting a blob.
In order to upload a large file I tried to use the same mechanism for the 'Put Block' API (as described here: https://learn.microsoft.com/en-us/rest/api/storageservices/put-block), without success; it fails with error code 403.
I would appreciate your help.
Thank you.
My code for uploading a single block:
private createAuthorizationHeader(canonicalizedString: string) {
const str = CryptoJS.HmacSHA256(canonicalizedString, CryptoJS.enc.Base64.parse(this.config.accountKey));
const sig = CryptoJS.enc.Base64.stringify(str);
const authorizationHeader = `SharedKey ${this.config.accountName}:${sig}`;
return authorizationHeader;
}
async putBlockBlob(containerName: string, blobPath: string, blobContent: string, blockIndex: number) {
const requestMethod = 'PUT';
const urlPath = `${containerName}/${blobPath}`;
const dateInRfc1123Format = new Date(Date.now()).toUTCString();
const storageServiceVersion = '2019-12-12';
const blobLength: number = blobContent.length;
const blockId = Buffer.from(`block-${blockIndex}`).toString('base64');
const blobType = 'BlockBlob';
// StringToSign =
// VERB + "\n" +
// Content-Encoding + "\n" +
// Content-Language + "\n" +
// Content-Length + "\n" +
// Content-MD5 + "\n" +
// Content-Type + "\n" +
// Date + "\n" +
// If-Modified-Since + "\n" +
// If-Match + "\n" +
// If-None-Match + "\n" +
// If-Unmodified-Since + "\n" +
// Range + "\n" +
// CanonicalizedHeaders +
// CanonicalizedResource;
const canonicalizedHeaders = `x-ms-date:${dateInRfc1123Format}\nx-ms-version:${storageServiceVersion}`;
const canonicalizedResource = `/${this.config.accountName}/${urlPath}\nblockid:${blockId}\ncomp:block`;
const stringToSign = `${requestMethod}\n\n\n${blobLength}\n\napplication/octet-stream\n\n\n\n\n\n\n${canonicalizedHeaders}\n${canonicalizedResource}`;
const uriStr = `${urlPath}?comp=block&blockid=${blockId}`;
const authorizationHeader = this.createAuthorizationHeader(stringToSign);
const header = {
'cache-control': 'no-cache',
'x-ms-date': dateInRfc1123Format,
'x-ms-version': storageServiceVersion,
Authorization: authorizationHeader,
'Content-Length': `${blobLength}`,
'Content-Type': 'application/octet-stream',
};
try {
return axios
.create({baseURL: `https://${this.config.accountName}.blob.core.windows.net/`,})
.request({
method: requestMethod,
url: uriStr,
data: blobContent,
headers: header,
})
.then((response) => response.data)
.catch((err) => {
throw err;
});
} catch (err) {
console.log(err);
throw err;
}
}
I believe the issue is due to a missing newline character between Range and CanonicalizedHeaders.
Can you try changing the following line of code:
const stringToSign = `${requestMethod}\n\n\n${blobLength}\n\napplication/octet-stream\n\n\n\n\n\n\n${canonicalizedHeaders}\n${canonicalizedResource}`;
to:
const stringToSign = `${requestMethod}\n\n\n${blobLength}\n\napplication/octet-stream\n\n\n\n\n\n\n\n${canonicalizedHeaders}\n${canonicalizedResource}`;
It should let you upload the data to the Azure Storage server.
Upload a file to the server:
export const uploadMedia = async (params: any, callBack: any) => {
const SAS_URL: any = "https://${blobUrl}.blob.core.windows.net";
const CONTAINER: any = "";
const SAS_TOKEN: any = "";
const { fileType, localUri } = params;
const userId = "set user ID here";
const fileName = String(fileType).concat(customIdGenerator(7));
const assetPath = `${SAS_URL}/${CONTAINER}/${userId}/${fileName}`;
HEADER["x-ms-blob-content-type"] = CONST_HEADER(fileType);
return await RNFetchBlob.fetch(
"PUT",
`${assetPath}?${SAS_TOKEN}`,
HEADER,
RNFetchBlob.wrap(localUri)
)
?.uploadProgress(callBack)
.then(() => {
return assetPath;
});
};
fileType = 'video' | 'image' | 'pdf'
let params: any = {
fileType: 'image',
localUri: image,
};
Generate a custom ID for uniqueness, or you can also use a UUID:
const customIdGenerator = (length: any) => {
var result = "";
var characters =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
var charactersLength = characters.length;
for (var i = 0; i < length; i++) {
result += characters.charAt(Math.floor(Math.random() * charactersLength));
}
return result;
};
Set the Content-Type header for different file types:
const CONST_HEADER = (type: any) => {
return type == 'image'
? `image/png`
: type == 'video'
? 'video/mp4'
: type == 'pdf' && 'application/pdf';
};
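A usage sketch for the helper above (assumed names: image is a local file URI obtained elsewhere; RNFetchBlob's uploadProgress callback receives the bytes written and the total):
// inside an async function:
const image = 'file:///path/to/local/image.png'; // assumed local URI
const uploadedPath = await uploadMedia(
  { fileType: 'image', localUri: image },
  (written: number, total: number) =>
    console.log(`upload progress: ${Math.round((written / total) * 100)}%`)
);
console.log('Uploaded to', uploadedPath);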
Error message (code attached below):
node_modules\@ethersproject\logger\lib\index.js:180
var error = new Error(message);
^
Error: transaction failed (transactionHash="0x03e0911d26d2175d55b233b4a7b17d06202e7c2fb52a2ecfd35f3863814cb374", transaction={"nonce":364,"gasPrice":{"type":"BigNumber","hex":"0x02540be400"},"gasLimit":{"type":"BigNumber","hex":"0x7a1200"},"to":"0x10ED43C718714eb63d5aA57B78B54704E256024E","value":{"type":"BigNumber","hex":"0x00"},"data":"0xa5be382e000000000000000000000000000000000000000000000000002386f26fc10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000003975383df42df8ed045d636b255bf0829d8d5970000000000000000000000000000000000000000000000000000000006698a9df0000000000000000000000000000000000000000000000000000000000000002000000000000000000000000bb4cdb9cbd36b01bd1cbaebf2de08d9173bc095c00000000000000000000000055d398326f99059ff775485246999027b3197955","chainId":56,"v":147,"r":"0xc1a926e6f3989a50185b2d75c4bc877a76c8a2f30d505a1055fa89271feba035","s":"0x0ebeb2da65954541c82ddf468177cc6a49324a0e7a6e3fb3f5795f41c1055261","from":"0x3975383Df42Df8ED045d636b255Bf0829d8D5970","hash":"0x03e0911d26d2175d55b233b4a7b17d06202e7c2fb52a2ecfd35f3863814cb374","type":null}, receipt={"to":"0x10ED43C718714eb63d5aA57B78B54704E256024E","from":"0x3975383Df42Df8ED045d636b255Bf0829d8D5970","contractAddress":null,"transactionIndex":19,"gasUsed":{"type":"BigNumber","hex":"0x5a67"},"logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","blockHash":"0xd504dfc3e7703e7657239e04f0d36e8118633689395cfc6fb9b3d6f213893724","transactionHash":"0x03e0911d26d2175d55b233b4a7b17d06202e7c2fb52a2ecfd35f3863814cb374","logs":[],"blockNumber":7497373,"confirmations":1,"cumulativeGasUsed":{"type":"BigNumber","hex":"0x216ce6"},"status":0,"byzantium":true}, code=CALL_EXCEPTION, version=providers/5.1.2)
at Logger.makeError (D:\codeforFun\322-uniswap-trading-bot\node_modules\@ethersproject\logger\lib\index.js:180:21)
at Logger.throwError (D:\codeforFun\322-uniswap-trading-bot\node_modules\@ethersproject\logger\lib\index.js:189:20)
at JsonRpcProvider.<anonymous> (D:\codeforFun\322-uniswap-trading-bot\node_modules\@ethersproject\providers\lib\base-provider.js:1162:36)
at step (D:\codeforFun\322-uniswap-trading-bot\node_modules\@ethersproject\providers\lib\base-provider.js:48:23)
at Object.next (D:\codeforFun\322-uniswap-trading-bot\node_modules\@ethersproject\providers\lib\base-provider.js:29:53)
at fulfilled (D:\codeforFun\322-uniswap-trading-bot\node_modules\@ethersproject\providers\lib\base-provider.js:20:58)
at processTicksAndRejections (node:internal/process/task_queues:94:5) {
The code, which I got from here:
const ethers = require('ethers');
const {ChainId, Token, TokenAmount, Fetcher, Pair, Route, Trade, TradeType, Percent} =
require('@pancakeswap-libs/sdk');
const Web3 = require('web3');
const {JsonRpcProvider} = require("@ethersproject/providers");
require("dotenv").config();
const provider = new JsonRpcProvider('https://bsc-dataseed1.binance.org/');
const web3 = new Web3('wss://apis.ankr.com/wss/c40792ffe3514537be9fb4109b32d257/946dd909d324e5a6caa2b72ba75c5799/binance/full/main');
const { address: admin } = web3.eth.accounts.wallet.add(process.env.PRIVATE_KEY);
console.log(`Modules loaded`);
// Command Line Input
const InputTokenAddr = web3.utils.toChecksumAddress(process.argv[2]);
// var BUSD = '0xe9e7CEA3DedcA5984780Bafc599bD69ADd087D56';
const OutputTokenAddr = web3.utils.toChecksumAddress(process.argv[3]);
// var WBNB = '0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c';
const InputTokenAmount = process.argv[4]
const Slipage = process.argv[5];
const PANCAKE_ROUTER = process.argv[6];
// const PANCAKE_ROUTER_V2 = '0x10ed43c718714eb63d5aa57b78b54704e256024e';
// const PANCAKE_ROUTER_V1 = '0x05fF2B0DB69458A0750badebc4f9e13aDd608C7F';
// 1/1000 = 0.001
const ONE_ETH_IN_WEI = web3.utils.toBN(web3.utils.toWei('1'));//BN->(BIG NUMBER) || toWei -> converts any ether value into wei.
const tradeAmount = ONE_ETH_IN_WEI.div(web3.utils.toBN('1000'));//tradeAmount = ONE_ETH_IN_WEI/1000
console.log(`tradeAmount ` + tradeAmount );
const init = async () => {
const [INPUT_TOKEN, OUTPUT_TOKEN] = await Promise.all(
[InputTokenAddr, OutputTokenAddr].map(tokenAddress => (
new Token(
ChainId.MAINNET,
tokenAddress,
18
)
)));
console.log(` <<<<<------- pair-------->>>>>`);
const pair = await Fetcher.fetchPairData(INPUT_TOKEN, OUTPUT_TOKEN, provider);
//console.log(JSON.stringify(pair));
console.log(` <<<<<------- route-------->>>>>`);
const route = await new Route([pair], INPUT_TOKEN);
//console.log(JSON.stringify(route));
console.log(` <<<<<------- Trade-------->>>>>`);
const trade = await new Trade(route, new TokenAmount(INPUT_TOKEN, tradeAmount), TradeType.EXACT_INPUT);
//console.log(JSON.stringify(trade));
//https://uniswap.org/docs/v2/javascript-SDK/trading/
const slippageTolerance = new Percent(Slipage, '100'); //
console.log("slippageTolerance: " + JSON.stringify(slippageTolerance));
// create transaction parameters
const amountOutMin = trade.minimumAmountOut(slippageTolerance).raw;
const path = [INPUT_TOKEN.address, OUTPUT_TOKEN.address];
const to = admin;
const deadline = Math.floor(Date.now() / 1000) + 60 * 20;
// Create signer
const wallet = new ethers.Wallet(
Buffer.from(
process.env.PRIVATE_KEY,
"hex"
)
);
const signer = wallet.connect(provider);
// Create Pancakeswap ethers Contract
const pancakeswap = new ethers.Contract(
PANCAKE_ROUTER,
['function swapExactTokensForTokens(uint amountIn, uint amountOutMin, address[] calldata path, address to, uint deadline) external returns (uint[] memory amounts)'],
signer
);
//Allow input token
if(true)
{
console.log(`Allow Pancakeswap <<<<<------- START-------->>>>>`);
let abi = ["function approve(address _spender, uint256 _value) public returns (bool success)"];
let contract = new ethers.Contract(INPUT_TOKEN.address, abi, signer);
let aproveResponse = await contract.approve(PANCAKE_ROUTER, ethers.utils.parseUnits('1000.0', 18), {gasLimit: 100000, gasPrice: 5e9});
console.log(JSON.stringify(aproveResponse));
console.log(`Allow Pancakeswap <<<<<------- END-------->>>>>`);
}
if(true)
{
console.log(`Executing transaction`);
var amountInParam = ethers.utils.parseUnits(InputTokenAmount, 18);
var amountOutMinParam = ethers.utils.parseUnits(web3.utils.fromWei(amountOutMin.toString()), 18);
console.log("amountInParam: " + amountInParam);
console.log("amountOutMinParam: " + amountOutMinParam);
console.log("amountOutMin: " + amountOutMin);
const tx = await pancakeswap.swapExactTokensForTokens(
amountInParam,
amountOutMinParam,
path,
to,
deadline,
{ gasLimit: ethers.utils.hexlify(300000), gasPrice: ethers.utils.parseUnits("9", "gwei") }
);
console.log(`Tx-hash: ${tx.hash}`)
const receipt = await tx.wait();
console.log(`Tx was mined in block: ${receipt.blockNumber}`);
}
}
init();