// verifyLendContracts.js
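// Verifies that the Revert Lend contracts deployed on Arbitrum match the source
// at the pinned commit by diffing Arbiscan-verified sources against a local checkout.
// Requirements (inferred from the calls below): git, Foundry's `forge`, and the
// npm packages axios, fs-extra, and diff. Run with: node verifyLendContracts.js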
const axios = require('axios');
const fs = require('fs-extra');
const path = require('path');
const Diff = require('diff');
const { execSync } = require('child_process');
// Configuration
const REPO_URL = 'https://github.com/revert-finance/lend.git';
const COMMIT_HASH = 'da1b1a2458666db01ee2fb98be190a70de16468b';
const LOCAL_REPO_DIR = path.join(__dirname, 'lend-verify'); // Directory to clone the repo into
// Replace with your Arbitrum contract addresses
const contractAddresses = [
'0x4f8629c1056d7c7fc7e132ad2234761488baa9be',
'0xd0186335f7b7c390b6d6c0c021212243ed297dda',
'0x9d97c76102e72883cd25fa60e0f4143516d5b6db',
'0xcfd55ac7647454ea0f7c4c9ec231e0a282b30980',
'0x74e6afef5705beb126c6d3bf46f8fad8f3e07825',
'0x9F703BFccd04389725FbaD7Bc50F2E345583d506',
'0xe5047b321071b939d48ae8aa34770c9838bb25e8',
'0x18616c0a8389a2cabf596f91d3e6ccc626e58997'
];
// Arbiscan API configuration
const ETHERSCAN_API_URL = 'https://api.arbiscan.io/api';
const ETHERSCAN_API_KEY = ''; // Replace with your Arbiscan API key
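// Note: Etherscan-family APIs typically throttle unauthenticated requests far more
// aggressively, so supplying a key is recommended even though the script may run without one.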
// Helper function to sleep
function sleep(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
// Main function to perform the entire process
async function main() {
try {
// Step 1: Clone the repository
console.log('Cloning repository...');
execSync(`git clone ${REPO_URL} ${LOCAL_REPO_DIR}`, { stdio: 'inherit' });
// Step 2: Checkout the specific commit
console.log(`Checking out commit ${COMMIT_HASH}...`);
execSync(`git checkout ${COMMIT_HASH}`, { cwd: LOCAL_REPO_DIR, stdio: 'inherit' });
// Step 3: Install project dependencies using Forge
console.log('Installing dependencies using Forge...');
try {
execSync('forge install', { cwd: LOCAL_REPO_DIR, stdio: 'inherit' });
} catch (error) {
console.error('Error installing dependencies with forge:', error.message);
throw error;
}
// Step 4: Update PoolAddress.sol with Arbitrum-specific POOL_INIT_CODE_HASH
console.log('Updating PoolAddress.sol with Arbitrum-specific POOL_INIT_CODE_HASH...');
updatePoolAddress();
// Step 5: Run the verification process
console.log('Starting verification process...');
await verifyContracts();
// Step 6: Clean up (delete the cloned repository)
console.log('Cleaning up...');
fs.removeSync(LOCAL_REPO_DIR);
console.log('Done.');
} catch (error) {
console.error('An error occurred:', error.message);
// Clean up even if there's an error
if (fs.existsSync(LOCAL_REPO_DIR)) {
fs.removeSync(LOCAL_REPO_DIR);
}
}
}
// Function to update PoolAddress.sol
function updatePoolAddress() {
const poolAddressPath = path.join(
LOCAL_REPO_DIR,
'lib',
'v3-periphery',
'contracts',
'libraries',
'PoolAddress.sol'
);
if (!fs.existsSync(poolAddressPath)) {
console.error('PoolAddress.sol not found at expected path:', poolAddressPath);
process.exit(1);
}
let content = fs.readFileSync(poolAddressPath, 'utf8');
// Replace the POOL_INIT_CODE_HASH value with the Arbitrum-specific hash
const arbitrumInitCodeHash = '0xe34f199b19b2b4f47f68442619d555527d244f78a3297ea89325f843f87b8b54';
const regex = /bytes32\s+internal\s+constant\s+POOL_INIT_CODE_HASH\s*=\s*0x[0-9a-fA-F]+;/;
if (!regex.test(content)) {
console.error('Unable to find POOL_INIT_CODE_HASH in PoolAddress.sol');
process.exit(1);
}
content = content.replace(
regex,
`bytes32 internal constant POOL_INIT_CODE_HASH = ${arbitrumInitCodeHash};`
);
fs.writeFileSync(poolAddressPath, content, 'utf8');
console.log('PoolAddress.sol updated successfully.');
}
// Main function to verify contracts
async function verifyContracts() {
for (const address of contractAddresses) {
await verifyContract(address);
// Add a delay to prevent exceeding the rate limit
await sleep(250); // 250 milliseconds delay
}
}
// Function to verify a single contract
async function verifyContract(address) {
console.log(`\nVerifying contract at address: ${address}`);
const result = await fetchSourceCode(address);
if (!result) {
console.log(`Failed to fetch source code for address: ${address}`);
return;
}
const { sources: remoteSources, contractName } = result;
const allMatched = compareSourceFiles(remoteSources, LOCAL_REPO_DIR, address);
if (allMatched) {
console.log(`All source files match for contract ${contractName} at address ${address}.`);
} else {
console.log(
`Differences found in source files for contract ${contractName} at address ${address}.`
);
}
}
// Fetch contract source code from Arbiscan
async function fetchSourceCode(address) {
try {
const response = await axios.get(ETHERSCAN_API_URL, {
params: {
module: 'contract',
action: 'getsourcecode',
address: address,
apikey: ETHERSCAN_API_KEY,
},
});
if (response.data.status !== '1') {
throw new Error(`Arbiscan API error: ${response.data.result}`);
}
const result = response.data.result[0];
let sourceCode = result.SourceCode;
const contractName = result.ContractName;
// Check if sourceCode is empty or undefined
if (!sourceCode || sourceCode.trim() === '') {
throw new Error(
`No source code found for address ${address}. The contract may not be verified on Arbiscan.`
);
}
// Handle possible Solidity files wrapped in JSON (multi-part files)
let parsedSources = null;
if (sourceCode.startsWith('{{') && sourceCode.endsWith('}}')) {
// Remove extra braces and parse JSON
const parsed = JSON.parse(sourceCode.slice(1, -1));
if (parsed.sources) {
parsedSources = parsed.sources;
} else {
throw new Error(`Unable to find 'sources' key in SourceCode for address ${address}`);
}
} else if (sourceCode.startsWith('{') && sourceCode.endsWith('}')) {
// Handle single-brace JSON structures
const parsed = JSON.parse(sourceCode);
if (parsed.sources) {
parsedSources = parsed.sources;
} else {
throw new Error(`Unable to find 'sources' key in SourceCode for address ${address}`);
}
} else {
throw new Error(`SourceCode format not recognized for address ${address}`);
}
// Optional: Save the remote sources for inspection
saveRemoteSources(parsedSources, contractName, address);
return { sources: parsedSources, contractName };
} catch (error) {
console.error(`Error fetching source code for address ${address}:`, error.message);
return null;
}
}
// Save remote source files to disk for inspection
function saveRemoteSources(sources, contractName, address) {
const outputDir = path.join(__dirname, 'remote_sources', `${contractName}_${address}`);
fs.ensureDirSync(outputDir);
for (const filePath in sources) {
const fileContent = sources[filePath].content;
const outputFilePath = path.join(outputDir, filePath);
// Ensure the directory exists
fs.ensureDirSync(path.dirname(outputFilePath));
fs.writeFileSync(outputFilePath, fileContent, 'utf8');
}
console.log(`Remote source files saved to: ${outputDir}`);
}
// Compare remote and local source files (address is used to scope known-acceptable diffs)
function compareSourceFiles(remoteSources, localSourceDir, address) {
let allMatched = true;
for (const filePath in remoteSources) {
const remoteContent = remoteSources[filePath].content;
const localFilePath = path.join(localSourceDir, filePath);
if (!fs.existsSync(localFilePath)) {
console.log(`Local file not found: ${localFilePath}`);
allMatched = false;
continue;
}
const localContent = fs.readFileSync(localFilePath, 'utf8');
const normalizedRemote = normalizeSourceCode(remoteContent);
const normalizedLocal = normalizeSourceCode(localContent);
if (normalizedRemote !== normalizedLocal) {
const diff = Diff.createPatch(filePath, localContent, remoteContent);
// Split the diff into lines
const diffLines = diff.split('\n');
// Check whether the diff ONLY contains the removal of the
// SequencerUptimeFeedInvalid error from src/utils/Constants.sol
// (createPatch emits a 15-line unified diff for that single change)
const acceptableDiff = diffLines.length === 15 &&
diffLines[0] === 'Index: src/utils/Constants.sol' &&
diffLines[4] === '@@ -27,9 +27,8 @@' &&
diffLines[9] === '- error SequencerUptimeFeedInvalid();';
// Contracts for which that difference is known and acceptable
const acceptableDiffContracts = [
'0xd0186335f7b7c390b6d6c0c021212243ed297dda',
'0x9d97c76102e72883cd25fa60e0f4143516d5b6db',
'0xcfd55ac7647454ea0f7c4c9ec231e0a282b30980',
];
if (!(acceptableDiff && acceptableDiffContracts.includes(address))) {
console.log(`Mismatch in file: ${filePath}`);
console.log(diff);
allMatched = false;
} else {
console.log(`Acceptable difference found in file: ${filePath}`);
console.log('The SequencerUptimeFeedInvalid error was removed, which is allowed.');
}
} else {
console.log(`File matches: ${filePath}`);
}
}
return allMatched;
}
// Normalize source code to reduce formatting differences
function normalizeSourceCode(code) {
// Remove comments
code = code.replace(/\/\/.*$/gm, ''); // Single-line comments
code = code.replace(/\/\*[\s\S]*?\*\//g, ''); // Multi-line comments
// Remove carriage returns and normalize line endings
code = code.replace(/\r/g, '');
// Remove SPDX license identifiers and pragma statements
// (done before whitespace is collapsed so the line-anchored SPDX match still applies)
code = code.replace(/SPDX-License-Identifier:.*?(\n|$)/g, '');
code = code.replace(/pragma solidity.*?;/g, '');
// Collapse all remaining whitespace
code = code.replace(/\s+/g, ' ');
return code.trim();
}
// Start the process
main();