fix: fix bug

Branch: main
Author: cyl19970726, 2 years ago
Parent: b253020db9
Commit: 6823e4df65

@@ -1,3 +1,3 @@
 {
   "Width": 120
 }

@ -1,7 +1,7 @@
# Contract Info # Contract Info
- RPC : https://galileo.web3q.io:8545 - RPC : https://galileo.web3q.io:8545
- ChainId : 3334 - ChainId : 3334
- Git3 Contract Address: 0x680336910D9357F6aDf26c0d61eAB8e65998Ab2d - Git3 Contract Address: 0x680336910D9357F6aDf26c0d61eAB8e65998Ab2d
# Sample Hardhat Project # Sample Hardhat Project

@@ -4,12 +4,11 @@ pragma solidity ^0.8.0;
 import "hardhat/console.sol";
 import "./IFileOperator.sol";
 import "@openzeppelin/contracts/access/Ownable.sol";
-import "git3-evm-large-storage/contracts/LargeStorageManager.sol";
+import "evm-large-storage/contracts/LargeStorageManager.sol";
 // import "evm-large-storage/contracts/W3RC3.sol";

 contract Git3 is LargeStorageManager {
     struct refInfo {
         bytes20 hash;
         uint96 index;
@@ -17,12 +16,12 @@ contract Git3 is LargeStorageManager {
     struct refData {
         bytes20 hash;
-        string name;
+        bytes name;
     }

     mapping(bytes => address) public repoNameToOwner;
-    mapping(string => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1 }
-    string[] public refs; // [main, dev, test, staging]
+    mapping(bytes => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1 }
+    bytes[] public refs; // [main, dev, test, staging]

     function _convertRefInfo(
         refInfo memory info
@@ -34,7 +33,7 @@ contract Git3 is LargeStorageManager {
     constructor() LargeStorageManager(0) {}

     modifier onlyOwner(bytes memory repoName) {
-        require(repoNameToOwner[repoName] == msg.sender);
+        require(repoNameToOwner[repoName] == msg.sender, "only owner");
         _;
     }
@@ -43,20 +42,29 @@ contract Git3 is LargeStorageManager {
         bytes memory path
     ) external view returns (bytes memory, bool) {
         // call flat directory(FD)
-        return _get(keccak256(bytes.concat(repoName, '/', path)));
+        return _get(keccak256(bytes.concat(repoName, "/", path)));
     }

-    function createRepo(bytes memory repoName)
-        external payable
-    {
-        require(repoNameToOwner[repoName] == address(0));
-        repoNameToOwner[repoName] = msg.sender;
+    function _createRepo(bytes memory repoName) internal {
+        if (repoNameToOwner[repoName] == address(0)) {
+            repoNameToOwner[repoName] = msg.sender;
+        } else {
+            require(repoNameToOwner[repoName] == msg.sender, "only owner");
+        }
     }

-    function upload(bytes memory repoName, bytes memory path, bytes calldata data)
-        external payable onlyOwner(repoName)
-    {
-        _putChunkFromCalldata(keccak256(bytes.concat(repoName, '/', path)), 0, data,msg.value);
+    function upload(
+        bytes memory repoName,
+        bytes memory path,
+        bytes calldata data
+    ) external payable {
+        _createRepo(repoName);
+        _putChunkFromCalldata(
+            keccak256(bytes.concat(repoName, "/", path)),
+            0,
+            data,
+            msg.value
+        );
     }

     function uploadChunk(
@@ -64,37 +72,59 @@ contract Git3 is LargeStorageManager {
         bytes memory path,
         uint256 chunkId,
         bytes calldata data
-    ) external payable onlyOwner(repoName) {
-        _putChunkFromCalldata(keccak256(bytes.concat(repoName, '/', path)), chunkId, data,msg.value);
+    ) external payable {
+        _createRepo(repoName);
+        _putChunkFromCalldata(
+            keccak256(bytes.concat(repoName, "/", path)),
+            chunkId,
+            data,
+            msg.value
+        );
     }

-    function remove(bytes memory repoName, bytes memory path) external onlyOwner(repoName) {
+    function remove(
+        bytes memory repoName,
+        bytes memory path
+    ) external onlyOwner(repoName) {
         // The actually process of remove will remove all the chunks
-        _remove(keccak256(bytes.concat(repoName, '/', path)),0);
+        _remove(keccak256(bytes.concat(repoName, "/", path)), 0);
     }

-    function size(bytes memory name) external view returns (uint256, uint256) {
-        return _size(keccak256(name));
+    function size(
+        bytes memory repoName,
+        bytes memory name
+    ) external view returns (uint256, uint256) {
+        return _size(keccak256(bytes.concat(repoName, "/", name)));
     }

-    function countChunks(bytes memory name) external view returns (uint256) {
-        return _countChunks(keccak256(name));
+    function countChunks(
+        bytes memory repoName,
+        bytes memory name
+    ) external view returns (uint256) {
+        return _countChunks(keccak256(bytes.concat(repoName, "/", name)));
     }

     function listRefs() public view returns (refData[] memory list) {
+        // todo: Differentiate all refs corresponding to a repo
         list = new refData[](refs.length);
         for (uint index = 0; index < refs.length; index++) {
             list[index] = _convertRefInfo(nameToRefInfo[refs[index]]);
         }
     }

-    function setRef(bytes memory repoName, string memory name, bytes20 refHash) public onlyOwner(repoName) {
+    function setRef(
+        bytes memory repoName,
+        bytes memory name,
+        bytes20 refHash
+    ) public {
+        bytes memory fullName = bytes.concat(repoName, "/", name);
         // only execute `sload` once to reduce gas consumption
         refInfo memory srs;
-        srs = nameToRefInfo[name];
+        srs = nameToRefInfo[fullName];
         uint256 refsLen = refs.length;
+        _createRepo(repoName);

         if (srs.hash == bytes20(0)) {
             // store refHash for the first time
             require(
@@ -102,21 +132,24 @@ contract Git3 is LargeStorageManager {
                 "Refs exceed valid length"
             );
-            nameToRefInfo[name].hash = refHash;
-            nameToRefInfo[name].index = uint96(refsLen);
-            refs.push(name);
+            nameToRefInfo[fullName].hash = refHash;
+            nameToRefInfo[fullName].index = uint96(refsLen);
+            refs.push(fullName);
         } else {
             // only update refHash
-            nameToRefInfo[name].hash = refHash;
+            nameToRefInfo[fullName].hash = refHash;
         }
     }

-    function delRef(bytes memory repoName, string memory name) public onlyOwner(repoName) {
+    function delRef(
+        bytes memory repoName,
+        bytes memory name
+    ) public onlyOwner(repoName) {
+        bytes memory fullName = bytes.concat(repoName, "/", name);
         // only execute `sload` once to reduce gas consumption
         refInfo memory srs;
-        srs = nameToRefInfo[name];
+        srs = nameToRefInfo[fullName];
         uint256 refsLen = refs.length;

         require(
@@ -130,7 +163,6 @@ contract Git3 is LargeStorageManager {
             nameToRefInfo[refs[refsLen - 1]].index = srs.index;
         }
         refs.pop();
-        delete nameToRefInfo[name];
+        delete nameToRefInfo[fullName];
     }
 }
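Taken together, the contract changes namespace every storage key and ref under repoName joined with "/", and upload/uploadChunk now create the repo on first use instead of requiring a separate createRepo call. A minimal usage sketch against the new interface, assuming ethers v5 via Hardhat as in this repo's tests; the contract address, repo name, and path below are placeholders, not values from the commit:

```ts
import { ethers } from "hardhat";

async function demo() {
  // Placeholder address; attach to wherever Git3 is actually deployed.
  const git3 = await ethers.getContractAt(
    "Git3",
    "0x0000000000000000000000000000000000000000"
  );

  const repoName = ethers.utils.toUtf8Bytes("myrepo"); // illustrative
  const path = ethers.utils.toUtf8Bytes("objects/abc"); // illustrative

  // upload() now takes repoName first and creates the repo on first use,
  // so no separate createRepo() call is needed.
  await git3.upload(repoName, path, "0x112233");

  // Reads are keyed on-chain by keccak256(repoName + "/" + path),
  // so the same (repoName, path) pair is passed back to download().
  const [data, exists] = await git3.download(repoName, path);
  console.log(data, exists);

  // Refs are likewise stored under "repoName/refName".
  const refHash = "0x" + "aa".repeat(20); // 20-byte placeholder hash
  await git3.setRef(repoName, ethers.utils.toUtf8Bytes("main"), refHash);
}
```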

@ -1,20 +1,19 @@
const NetworkDefinition = { const NetworkDefinition = {
rinkeby: { rinkeby: {
url: "https://rinkeby.infura.io/v3/*******your-api-key*******", url: "https://rinkeby.infura.io/v3/*******your-api-key*******",
accounts: { accounts: {
mnemonic: "test test test test test test test test test test test junk" mnemonic: "test test test test test test test test test test test junk",
} },
},
polygon: {
url: "https://polygon.infura.io/v3/*******your-api-key*******",
accounts: {
mnemonic: "test test test test test test test test test test test junk",
}, },
polygon: { },
url: "https://polygon.infura.io/v3/*******your-api-key*******", };
accounts: {
mnemonic: "test test test test test test test test test test test junk"
}
}
}
const EtherscanConfig = { const EtherscanConfig = {
apiKey: "YOUR_ETHERSCAN_API_KEY" apiKey: "YOUR_ETHERSCAN_API_KEY",
} };
export { NetworkDefinition, EtherscanConfig } export { NetworkDefinition, EtherscanConfig };

@@ -2,7 +2,7 @@ require("dotenv").config();
 import { HardhatUserConfig } from "hardhat/config";
 import "@nomicfoundation/hardhat-toolbox";
-import { NetworkDefinition, EtherscanConfig } from './local.config';
+import { NetworkDefinition, EtherscanConfig } from "./local.config";

 const config: HardhatUserConfig = {
   solidity: {
@@ -12,18 +12,19 @@ const config: HardhatUserConfig = {
         settings: {
           optimizer: {
             enabled: true,
-            runs: 1000
-          }
-        }
-      }
+            runs: 1000,
+          },
+        },
+      },
     ],
   },
   networks: {
     w3qGalileo: {
       url: "https://galileo.web3q.io:8545",
-      accounts: process.env.PRIVATE_KEY !== undefined ? [process.env.PRIVATE_KEY] : [],
+      accounts:
+        process.env.PRIVATE_KEY !== undefined ? [process.env.PRIVATE_KEY] : [],
     },
-    ...NetworkDefinition
+    ...NetworkDefinition,
   },
   etherscan: EtherscanConfig,
 };

package-lock.json (generated): 3090 changed lines. File diff suppressed because it is too large.

@@ -22,10 +22,10 @@
     "@typechain/hardhat": "^6.1.5",
     "dotenv": "^16.0.3",
     "hardhat": "^2.12.4",
+    "hardhat-gas-reporter": "^1.0.9",
     "prettier": "2.8.1",
     "prettier-check": "^2.0.0",
     "prettier-plugin-solidity": "^1.0.0-beta.18",
-    "hardhat-gas-reporter": "^1.0.9",
     "solidity-coverage": "^0.8.2",
     "ts-node": "^10.9.1",
     "typescript": "^4.9.4"
@@ -35,8 +35,8 @@
     "@nomiclabs/hardhat-ethers": "^2.2.1",
     "@nomiclabs/hardhat-etherscan": "^3.1.3",
     "@openzeppelin/contracts": "^4.8.0",
-    "git3-evm-large-storage": "^1.0.0",
     "chai": "^4.3.7",
+    "evm-large-storage": "^1.0.0",
     "typechain": "^8.1.1"
   }
 }

@@ -15,18 +15,23 @@ describe("Git3 Test", function () {
     [singer] = await ethers.getSigners();
     console.log("singer", singer.address);

-    await git3.upload("0x616263", "0x112233");
-    expect(await git3.download("0x616263")).to.eql(["0x112233", true]);
+    await git3.upload("0x11", "0x616263", "0x112233");
+    expect(await git3.download("0x11", "0x616263")).to.eql(["0x112233", true]);

-    let data = Array.from({ length: 40 }, () => Math.floor(Math.random() * 256));
-    await git3.upload("0x616263", data);
-    expect(await git3.download("0x616263")).to.eql([ethers.utils.hexlify(data), true]);
+    let data = Array.from({ length: 40 }, () =>
+      Math.floor(Math.random() * 256)
+    );
+    await git3.upload("0x11", "0x616263", data);
+    expect(await git3.download("0x11", "0x616263")).to.eql([
+      ethers.utils.hexlify(data),
+      true,
+    ]);

-    expect(await git3.size("0x616263")).to.eql([ToBig(40), ToBig(1)]);
-    await git3.remove("0x616263");
-    expect(await git3.size("0x616263")).to.eql([ToBig(0), ToBig(0)]);
+    expect(await git3.size("0x11", "0x616263")).to.eql([ToBig(40), ToBig(1)]);
+    await git3.remove("0x11", "0x616263");
+    expect(await git3.size("0x11", "0x616263")).to.eql([ToBig(0), ToBig(0)]);
   });

   it("upload/download/remove chunks", async function () {
@@ -34,20 +39,30 @@ describe("Git3 Test", function () {
     const git3 = await Git3.deploy();
     await git3.deployed();

-    expect(await git3.countChunks("0x616263")).to.eql(ToBig(0));
-    let data0 = Array.from({ length: 10 }, () => Math.floor(Math.random() * 256));
-    await git3.uploadChunk("0x616263", 0, data0);
-    expect(await git3.download("0x616263")).to.eql([ethers.utils.hexlify(data0), true]);
+    expect(await git3.countChunks("0x11", "0x616263")).to.eql(ToBig(0));
+    let data0 = Array.from({ length: 10 }, () =>
+      Math.floor(Math.random() * 256)
+    );
+    await git3.uploadChunk("0x11", "0x616263", 0, data0);
+    expect(await git3.download("0x11", "0x616263")).to.eql([
+      ethers.utils.hexlify(data0),
+      true,
+    ]);

-    let data1 = Array.from({ length: 20 }, () => Math.floor(Math.random() * 256));
-    await git3.uploadChunk("0x616263", 1, data1);
-    expect(await git3.download("0x616263")).to.eql([ethers.utils.hexlify(data0.concat(data1)), true]);
+    let data1 = Array.from({ length: 20 }, () =>
+      Math.floor(Math.random() * 256)
+    );
+    await git3.uploadChunk("0x11", "0x616263", 1, data1);
+    expect(await git3.download("0x11", "0x616263")).to.eql([
+      ethers.utils.hexlify(data0.concat(data1)),
+      true,
+    ]);

-    await git3.remove("0x616263"); // should succeed
-    expect(await git3.size("0x616263")).to.eql([ToBig(0), ToBig(0)]);
-    expect(await git3.download("0x616263")).to.eql(["0x", false]);
-    expect(await git3.countChunks("0x616263")).to.eql(ToBig(0));
+    await git3.remove("0x11", "0x616263"); // should succeed
+    expect(await git3.size("0x11", "0x616263")).to.eql([ToBig(0), ToBig(0)]);
+    expect(await git3.download("0x11", "0x616263")).to.eql(["0x", false]);
+    expect(await git3.countChunks("0x11", "0x616263")).to.eql(ToBig(0));
   });

   it("set/update/list/remove Reference", async function () {
@@ -55,40 +70,45 @@ describe("Git3 Test", function () {
     const git3 = await Git3.deploy();
     await git3.deployed();

+    let repoName = "0x11";
+
+    function concatHexStr(s1, s2) {
+      return s1.concat("2f").concat(s2.slice(2));
+    }
+
     let key0 = "0x616263";
     let data0 = "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
-    await git3.setRef(key0, data0);
+    await git3.setRef(repoName, key0, data0);

     let key1 = "0x717273";
     let data1 = "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
-    await git3.setRef(key1, data1);
+    await git3.setRef(repoName, key1, data1);

     let key2 = "0x818283";
     let data2 = "0xcccccccccccccccccccccccccccccccccccccccc";
-    await git3.setRef(key2, data2);
+    await git3.setRef(repoName, key2, data2);

     let refs = await git3.listRefs();
-    expect(refs[0]).to.eql([data0, key0]);
-    expect(refs[1]).to.eql([data1, key1]);
-    expect(refs[2]).to.eql([data2, key2]);
+    expect(refs[0]).to.eql([data0, concatHexStr(repoName, key0)]);
+    expect(refs[1]).to.eql([data1, concatHexStr(repoName, key1)]);
+    expect(refs[2]).to.eql([data2, concatHexStr(repoName, key2)]);
     expect(refs.length).to.eql(3);

     // check delRef
-    await git3.delRef(key0);
+    await git3.delRef(repoName, key0);
     refs = await git3.listRefs();
-    expect(refs[0]).to.eql([data2, key2]);
-    expect(refs[1]).to.eql([data1, key1]);
+    expect(refs[0]).to.eql([data2, concatHexStr(repoName, key2)]);
+    expect(refs[1]).to.eql([data1, concatHexStr(repoName, key1)]);
     expect(refs.length).to.eql(2);

-    await git3.delRef(key1);
+    await git3.delRef(repoName, key1);
     refs = await git3.listRefs();
-    expect(refs[0]).to.eql([data2, key2]);
+    expect(refs[0]).to.eql([data2, concatHexStr(repoName, key2)]);
     expect(refs.length).to.eql(1);

     // check update
     let data3 = "0xdddddddddddddddddddddddddddddddddddddddd";
-    await git3.setRef(key2, data3);
+    await git3.setRef(repoName, key2, data3);
     refs = await git3.listRefs();
-    expect(refs[0]).to.eql([data3, key2]);
+    expect(refs[0]).to.eql([data3, concatHexStr(repoName, key2)]);
   });
 });
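For reference, the test helper concatHexStr splices "2f" (the ASCII hex code for "/") between two hex strings, which mirrors the bytes.concat(repoName, "/", name) key the contract builds on-chain. A minimal sketch of the same idea using ethers v5 utilities; the helper name fullRefName is ours, not part of the commit:

```ts
import { ethers } from "ethers";

// Join repoName and refName with a "/" byte (0x2f), matching the on-chain key.
function fullRefName(repoNameHex: string, refNameHex: string): string {
  return ethers.utils.hexlify(
    ethers.utils.concat([repoNameHex, "0x2f", refNameHex])
  );
}

// "0x11" + "/" + "abc" (0x616263) => "0x112f616263",
// the same value concatHexStr("0x11", "0x616263") produces in the test.
console.log(fullRefName("0x11", "0x616263"));
```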

@ -1,39 +1,41 @@
import hre from 'hardhat' import hre from "hardhat";
const { ethers } = hre; const { ethers } = hre;
import fs from 'fs' import fs from "fs";
async function main() { async function main() {
const accounts = await ethers.getSigners(); const accounts = await ethers.getSigners();
console.log(accounts[0].address); console.log(accounts[0].address);
const Git3 = await hre.ethers.getContractAt("Git3", "0xa709975Bc01e745432f8898499E7b9a60f420117") const Git3 = await hre.ethers.getContractAt(
let storageManager = await Git3.storageManager() "Git3",
console.log("storageManager", storageManager) "0xa709975Bc01e745432f8898499E7b9a60f420117"
);
const flat = await hre.ethers.getContractAt("FlatDirectory", storageManager) let storageManager = await Git3.storageManager();
let owner = await flat.owner() console.log("storageManager", storageManager);
console.log("owner", owner)
return const flat = await hre.ethers.getContractAt("FlatDirectory", storageManager);
let owner = await flat.owner();
console.log("owner", owner);
let file = fs.readFileSync("test/git3.png") return;
let buffer = Array.from(file).slice(0, 24576) let file = fs.readFileSync("test/git3.png");
let fileSize = buffer.length
console.log("buffer", buffer.length) let buffer = Array.from(file).slice(0, 24576);
let fileSize = buffer.length;
let cost = 0 console.log("buffer", buffer.length);
if (fileSize > 24 * 1024 - 326) {
cost = Math.floor((fileSize + 326) / 1024 / 24) let cost = 0;
} if (fileSize > 24 * 1024 - 326) {
let key = ethers.utils.toUtf8Bytes("aaa") cost = Math.floor((fileSize + 326) / 1024 / 24);
let rept = await Git3.upload(key, buffer, { value: ethers.utils.parseEther(cost.toString()) }) }
console.log("rept", "https://explorer.galileo.web3q.io/tx/" + rept.hash) let key = ethers.utils.toUtf8Bytes("aaa");
let rept = await Git3.upload(key, buffer, {
value: ethers.utils.parseEther(cost.toString()),
});
console.log("rept", "https://explorer.galileo.web3q.io/tx/" + rept.hash);
} }
main().catch((error) => { main().catch((error) => {
console.error(error); console.error(error);
process.exit(1); process.exit(1);
}); });
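The script only attaches value when the payload exceeds roughly one 24 KB chunk minus a 326-byte allowance, paying Math.floor((fileSize + 326) / 1024 / 24) ether. A small sketch of that estimate as a standalone helper; the function name is ours, and the constants are copied from the script, whose exact rationale the commit does not document:

```ts
// Reproduces the fee estimate used in the script above (assumption: same units).
function estimateUploadValueEther(fileSize: number): number {
  const CHUNK_SIZE = 24 * 1024; // bytes per chunk, as the script assumes
  const OVERHEAD = 326;         // byte allowance used by the script

  let cost = 0;
  if (fileSize > CHUNK_SIZE - OVERHEAD) {
    cost = Math.floor((fileSize + OVERHEAD) / 1024 / 24);
  }
  return cost; // whole ether, later wrapped in ethers.utils.parseEther(...)
}

console.log(estimateUploadValueEther(24576)); // 1, matching the 24 KiB slice above
```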
