Merge pull request #1 from cyl19970726/dev1

Add access control and support multiple repos
hhh_QC committed 2 years ago via GitHub
commit b0ff3488c2

@ -0,0 +1,6 @@
node_modules
artifacts
cache
coverage*
gasReporterOutput.json
typechain-types

@ -0,0 +1,3 @@
{
"Width": 120
}

@ -1,10 +1,9 @@
# Contract Info
- RPC : https://galileo.web3q.io:8545
- ChainId : 3334
- ChainId : 3334
- Git3 Contract Address: 0x680336910D9357F6aDf26c0d61eAB8e65998Ab2d
# Sample Hardhat Project
This project demonstrates a basic Hardhat use case. It comes with a sample contract, a test for that contract, and a script that deploys that contract.
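
The usual workflow (a hedged sketch, not part of this diff) is to compile and test with Hardhat, then deploy Git3 with a short ethers script; after this PR the constructor takes no arguments:

import { ethers } from "hardhat";

async function main() {
  // Deploy the Git3 contract exactly as the tests in this PR do.
  const Git3 = await ethers.getContractFactory("Git3");
  const git3 = await Git3.deploy(); // no constructor arguments after this change
  await git3.deployed();
  console.log("Git3 deployed to:", git3.address);
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});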

@ -4,12 +4,11 @@ pragma solidity ^0.8.0;
import "hardhat/console.sol";
import "./IFileOperator.sol";
import "@openzeppelin/contracts/access/Ownable.sol";
import "evm-large-storage/contracts/examples/FlatDirectory.sol";
import "evm-large-storage/contracts/LargeStorageManager.sol";
// import "evm-large-storage/contracts/W3RC3.sol";
contract Git3 {
IFileOperator public immutable storageManager;
contract Git3 is LargeStorageManager {
struct refInfo {
bytes20 hash;
uint96 index;
@ -17,26 +16,25 @@ contract Git3 {
struct refData {
bytes20 hash;
string name;
bytes name;
}
mapping(bytes => address) public repoNameToOwner;
mapping(string => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1 }
string[] public refs; // [main, dev, test, staging]
mapping(bytes => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1 }
mapping(bytes => bytes[]) public repoNameToRefs; // [main, dev, test, staging]
function _convertRefInfo(
bytes memory repoName,
refInfo memory info
) internal view returns (refData memory res) {
res.hash = info.hash;
res.name = refs[info.index];
res.name = repoNameToRefs[repoName][info.index];
}
constructor() {
storageManager = IFileOperator(address(new FlatDirectory(220)));
}
constructor() LargeStorageManager(0) {}
modifier onlyOwner(bytes memory repoName) {
require(repoNameToOwner[repoName] == msg.sender);
require(repoNameToOwner[repoName] == msg.sender, "only owner");
_;
}
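
Not part of the diff, but a hedged TypeScript sketch of how this access control plays out from the caller's side: createRepo records msg.sender in repoNameToOwner, so any write from a different signer fails the onlyOwner(repoName) check and reverts with "only owner".

import { ethers } from "hardhat";
import { expect } from "chai";

async function demoAccessControl() {
  const [owner, stranger] = await ethers.getSigners();
  const Git3 = await ethers.getContractFactory("Git3");
  const git3 = await Git3.deploy();
  await git3.deployed();

  await git3.connect(owner).createRepo("0x11"); // owner is now repoNameToOwner["0x11"]
  await expect(
    git3.connect(stranger).upload("0x11", "0x616263", "0x112233")
  ).to.be.revertedWith("only owner"); // rejected by the onlyOwner(repoName) modifier
}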
@ -45,57 +43,80 @@ contract Git3 {
bytes memory path
) external view returns (bytes memory, bool) {
// call flat directory(FD)
return storageManager.read(bytes.concat(repoName, '/', path));
return _get(keccak256(bytes.concat(repoName, "/", path)));
}
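
A hedged aside (not in the diff): every file now lives under the key keccak256(bytes.concat(repoName, "/", path)), so the same key can be recomputed off-chain with ethers v5 utilities, for example:

import { ethers } from "ethers";

// Recompute the on-chain storage key for a repo/path pair; both arguments are
// hex-encoded bytes, matching how the tests pass "0x11" and "0x616263".
function storageKey(repoName: string, path: string): string {
  const packed = ethers.utils.concat([
    ethers.utils.arrayify(repoName),
    ethers.utils.toUtf8Bytes("/"), // the 0x2f separator used by bytes.concat above
    ethers.utils.arrayify(path),
  ]);
  return ethers.utils.keccak256(packed);
}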
function createRepo(bytes memory repoName)
external payable
{
require(repoNameToOwner[repoName] == address(0));
function createRepo(bytes memory repoName) external{
require(repoNameToOwner[repoName] == address(0), "RepoName already exists");
repoNameToOwner[repoName] = msg.sender;
}
function upload(bytes memory repoName, bytes memory path, bytes memory data)
external payable onlyOwner(repoName)
{
storageManager.writeChunk{value: msg.value}(bytes.concat(repoName, '/', path), 0, data);
function upload(
bytes memory repoName,
bytes memory path,
bytes calldata data
) external payable onlyOwner(repoName){
_putChunkFromCalldata(
keccak256(bytes.concat(repoName, "/", path)),
0,
data,
msg.value
);
}
function uploadChunk(
bytes memory repoName,
bytes memory path,
uint256 chunkId,
bytes memory data
) external payable onlyOwner(repoName) {
storageManager.writeChunk{value: msg.value}(bytes.concat(repoName, '/', path), chunkId, data);
bytes calldata data
) external payable onlyOwner(repoName){
_putChunkFromCalldata(
keccak256(bytes.concat(repoName, "/", path)),
chunkId,
data,
msg.value
);
}
function remove(bytes memory repoName, bytes memory path) external onlyOwner(repoName) {
function remove(
bytes memory repoName,
bytes memory path
) external onlyOwner(repoName) {
// The actual remove operation removes all of the path's chunks
storageManager.remove(bytes.concat(repoName, '/', path));
_remove(keccak256(bytes.concat(repoName, "/", path)), 0);
}
function size(string memory name) external view returns (uint256, uint256) {
return storageManager.size(bytes(name));
function size(
bytes memory repoName,
bytes memory name
) external view returns (uint256, uint256) {
return _size(keccak256(bytes.concat(repoName, "/", name)));
}
function countChunks(string memory name) external view returns (uint256) {
return storageManager.countChunks(bytes(name));
function countChunks(
bytes memory repoName,
bytes memory name
) external view returns (uint256) {
return _countChunks(keccak256(bytes.concat(repoName, "/", name)));
}
function listRefs() public view returns (refData[] memory list) {
list = new refData[](refs.length);
for (uint index = 0; index < refs.length; index++) {
list[index] = _convertRefInfo(nameToRefInfo[refs[index]]);
function listRefs(bytes memory repoName) public view returns (refData[] memory list) {
list = new refData[](repoNameToRefs[repoName].length);
for (uint index = 0; index < repoNameToRefs[repoName].length; index++) {
list[index] = _convertRefInfo(repoName,nameToRefInfo[repoNameToRefs[repoName][index]]);
}
}
function setRef(bytes memory repoName, string memory name, bytes20 refHash) public onlyOwner(repoName) {
function setRef(
bytes memory repoName,
bytes memory name,
bytes20 refHash
) public onlyOwner(repoName){
bytes memory fullName = bytes.concat(repoName, "/", name);
// only execute `sload` once to reduce gas consumption
refInfo memory srs;
srs = nameToRefInfo[name];
uint256 refsLen = refs.length;
srs = nameToRefInfo[fullName];
uint256 refsLen = repoNameToRefs[repoName].length;
if (srs.hash == bytes20(0)) {
// store refHash for the first time
@ -104,21 +125,25 @@ contract Git3 {
"Refs exceed valid length"
);
nameToRefInfo[name].hash = refHash;
nameToRefInfo[name].index = uint96(refsLen);
nameToRefInfo[fullName].hash = refHash;
nameToRefInfo[fullName].index = uint96(refsLen);
refs.push(name);
repoNameToRefs[repoName].push(fullName);
} else {
// only update refHash
nameToRefInfo[name].hash = refHash;
nameToRefInfo[fullName].hash = refHash;
}
}
function delRef(bytes memory repoName, string memory name) public onlyOwner(repoName) {
function delRef(
bytes memory repoName,
bytes memory name
) public onlyOwner(repoName) {
bytes memory fullName = bytes.concat(repoName, "/", name);
// only execute `sload` once to reduce gas consumption
refInfo memory srs;
srs = nameToRefInfo[name];
uint256 refsLen = refs.length;
srs = nameToRefInfo[fullName];
uint256 refsLen = repoNameToRefs[repoName].length;
require(
srs.hash != bytes20(0),
@ -127,10 +152,10 @@ contract Git3 {
require(srs.index < refsLen, "System Error: Invalid index");
if (srs.index < refsLen - 1) {
refs[srs.index] = refs[refsLen - 1];
nameToRefInfo[refs[refsLen - 1]].index = srs.index;
repoNameToRefs[repoName][srs.index] = repoNameToRefs[repoName][refsLen - 1];
nameToRefInfo[repoNameToRefs[repoName][refsLen - 1]].index = srs.index;
}
refs.pop();
delete nameToRefInfo[name];
repoNameToRefs[repoName].pop();
delete nameToRefInfo[fullName];
}
}
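
For readers new to the pattern: delRef uses swap-and-pop, moving the last ref into the deleted slot, patching its stored index, and shrinking the array by one. A hedged TypeScript model of the same bookkeeping (illustration only, not contract code):

type RefInfo = { hash: string; index: number };

function delRefModel(refs: string[], info: Map<string, RefInfo>, fullName: string) {
  const srs = info.get(fullName);
  if (!srs) throw new Error("Reference of this name does not exist");
  const last = refs.length - 1;
  if (srs.index < last) {
    // Move the last entry into the vacated slot and fix its stored index,
    // mirroring how repoNameToRefs and nameToRefInfo are updated on-chain.
    refs[srs.index] = refs[last];
    info.get(refs[last])!.index = srs.index;
  }
  refs.pop();
  info.delete(fullName);
}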

@ -21,17 +21,32 @@ interface IFileOperator {
bytes memory data
) external payable;
function readChunk(bytes memory name, uint256 chunkId) external view returns (bytes memory, bool);
function readChunk(
bytes memory name,
uint256 chunkId
) external view returns (bytes memory, bool);
function chunkSize(bytes memory name, uint256 chunkId) external view returns (uint256, bool);
function chunkSize(
bytes memory name,
uint256 chunkId
) external view returns (uint256, bool);
function removeChunk(bytes memory name, uint256 chunkId) external returns (bool);
function removeChunk(
bytes memory name,
uint256 chunkId
) external returns (bool);
function truncate(bytes memory name, uint256 chunkId) external returns (uint256);
function truncate(
bytes memory name,
uint256 chunkId
) external returns (uint256);
function refund() external;
function destruct() external;
function getChunkHash(bytes memory name, uint256 chunkId) external view returns (bytes32);
function getChunkHash(
bytes memory name,
uint256 chunkId
) external view returns (bytes32);
}

@ -1,20 +1,19 @@
const NetworkDefinition = {
rinkeby: {
url: "https://rinkeby.infura.io/v3/*******your-api-key*******",
accounts: {
mnemonic: "test test test test test test test test test test test junk"
}
rinkeby: {
url: "https://rinkeby.infura.io/v3/*******your-api-key*******",
accounts: {
mnemonic: "test test test test test test test test test test test junk",
},
},
polygon: {
url: "https://polygon.infura.io/v3/*******your-api-key*******",
accounts: {
mnemonic: "test test test test test test test test test test test junk",
},
polygon: {
url: "https://polygon.infura.io/v3/*******your-api-key*******",
accounts: {
mnemonic: "test test test test test test test test test test test junk"
}
}
}
},
};
const EtherscanConfig = {
apiKey: "YOUR_ETHERSCAN_API_KEY"
}
apiKey: "YOUR_ETHERSCAN_API_KEY",
};
export { NetworkDefinition, EtherscanConfig }
export { NetworkDefinition, EtherscanConfig };

@ -1,10 +1,8 @@
require("dotenv").config();
import { HardhatUserConfig } from "hardhat/config";
import "@nomicfoundation/hardhat-toolbox";
import { NetworkDefinition, EtherscanConfig } from './local.config';
import { NetworkDefinition, EtherscanConfig } from "./local.config";
const config: HardhatUserConfig = {
solidity: {
@ -14,18 +12,19 @@ const config: HardhatUserConfig = {
settings: {
optimizer: {
enabled: true,
runs: 1000
}
}
}
runs: 1000,
},
},
},
],
},
networks: {
w3qGalileo: {
url: "https://galileo.web3q.io:8545",
accounts: process.env.PRIVATE_KEY !== undefined ? [process.env.PRIVATE_KEY] : [],
accounts:
process.env.PRIVATE_KEY !== undefined ? [process.env.PRIVATE_KEY] : [],
},
...NetworkDefinition
...NetworkDefinition,
},
etherscan: EtherscanConfig,
};
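
A hedged note on the accounts line above (assumption: the key comes from a git-ignored .env file, which is why require("dotenv").config() runs first):

import "dotenv/config";

// Hypothetical .env entry:  PRIVATE_KEY=0x0123...   (never commit this file)
const accounts =
  process.env.PRIVATE_KEY !== undefined ? [process.env.PRIVATE_KEY] : [];
console.log(accounts.length ? "w3qGalileo deployer key loaded" : "no key set; read-only tasks only");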

package-lock.json (generated, 10478 lines): diff suppressed because it is too large.

@ -23,6 +23,9 @@
"dotenv": "^16.0.3",
"hardhat": "^2.12.4",
"hardhat-gas-reporter": "^1.0.9",
"prettier": "2.8.1",
"prettier-check": "^2.0.0",
"prettier-plugin-solidity": "^1.0.0-beta.18",
"solidity-coverage": "^0.8.2",
"ts-node": "^10.9.1",
"typescript": "^4.9.4"

@ -11,25 +11,28 @@ describe("Git3 Test", function () {
const git3 = await Git3.deploy();
await git3.deployed();
await git3.upload("0x616263", "0x112233");
expect(await git3.download("0x616263")).to.eql(["0x112233", true]);
let signer;
[signer] = await ethers.getSigners();
await git3.createRepo("0x11");
await git3.upload("0x11", "0x616263", "0x112233");
expect(await git3.download("0x11", "0x616263")).to.eql(["0x112233", true]);
let data = Array.from({ length: 40 }, () =>
Math.floor(Math.random() * 256)
);
await git3.upload("0x616263", data);
expect(await git3.download("0x616263")).to.eql([
await git3.upload("0x11", "0x616263", data);
expect(await git3.download("0x11", "0x616263")).to.eql([
ethers.utils.hexlify(data),
true,
]);
expect(await git3.size("0x616263")).to.eql([ToBig(40), ToBig(1)]);
await git3.remove("0x616263");
expect(await git3.size("0x616263")).to.eql([ToBig(0), ToBig(0)]);
expect(await git3.size("0x11", "0x616263")).to.eql([ToBig(40), ToBig(1)]);
await git3.remove("0x11", "0x616263");
expect(await git3.size("0x11", "0x616263")).to.eql([ToBig(0), ToBig(0)]);
});
it("upload/download/remove chunks", async function () {
@ -37,13 +40,15 @@ describe("Git3 Test", function () {
const git3 = await Git3.deploy();
await git3.deployed();
expect(await git3.countChunks("0x616263")).to.eql(ToBig(0));
await git3.createRepo("0x11");
expect(await git3.countChunks("0x11", "0x616263")).to.eql(ToBig(0));
let data0 = Array.from({ length: 10 }, () =>
Math.floor(Math.random() * 256)
);
await git3.uploadChunk("0x616263",0, data0);
expect(await git3.download("0x616263")).to.eql([
await git3.uploadChunk("0x11", "0x616263", 0, data0);
expect(await git3.download("0x11", "0x616263")).to.eql([
ethers.utils.hexlify(data0),
true,
]);
@ -51,63 +56,87 @@ describe("Git3 Test", function () {
let data1 = Array.from({ length: 20 }, () =>
Math.floor(Math.random() * 256)
);
await git3.uploadChunk("0x616263", 1, data1);
expect(await git3.download("0x616263")).to.eql([
await git3.uploadChunk("0x11", "0x616263", 1, data1);
expect(await git3.download("0x11", "0x616263")).to.eql([
ethers.utils.hexlify(data0.concat(data1)),
true,
]);
await git3.remove("0x616263"); // should succeed
expect(await git3.size("0x616263")).to.eql([ToBig(0), ToBig(0)]);
expect(await git3.download("0x616263")).to.eql([
"0x",
false,
]);
expect(await git3.countChunks("0x616263")).to.eql(ToBig(0));
await git3.remove("0x11", "0x616263"); // should succeed
expect(await git3.size("0x11", "0x616263")).to.eql([ToBig(0), ToBig(0)]);
expect(await git3.download("0x11", "0x616263")).to.eql(["0x", false]);
expect(await git3.countChunks("0x11", "0x616263")).to.eql(ToBig(0));
});
it("set/update/list/remove Reference",async function() {
it("set/update/list/remove Reference", async function () {
const Git3 = await ethers.getContractFactory("Git3");
const git3 = await Git3.deploy();
await git3.deployed();
let repoName = "0x11";
await git3.createRepo(repoName);
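// concatHexStr joins two hex strings with "2f" (the byte for "/"),
// mirroring bytes.concat(repoName, "/", name) on-chain.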
function concatHexStr(s1, s2) {
return s1.concat("2f").concat(s2.slice(2));
}
let key0 = "0x616263";
let data0 = "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
await git3.setRef(key0,data0);
await git3.setRef(repoName, key0, data0);
let key1 = "0x717273";
let data1 = "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb";
await git3.setRef(key1,data1);
await git3.setRef(repoName, key1, data1);
let key2 = "0x818283";
let data2 = "0xcccccccccccccccccccccccccccccccccccccccc";
await git3.setRef(key2,data2);
await git3.setRef(repoName, key2, data2);
let refs = await git3.listRefs();
expect(refs[0]).to.eql([data0,key0]);
expect(refs[1]).to.eql([data1,key1]);
expect(refs[2]).to.eql([data2,key2]);
let refs = await git3.listRefs(repoName);
expect(refs[0]).to.eql([data0, concatHexStr(repoName, key0)]);
expect(refs[1]).to.eql([data1, concatHexStr(repoName, key1)]);
expect(refs[2]).to.eql([data2, concatHexStr(repoName, key2)]);
expect(refs.length).to.eql(3);
// check delRef
await git3.delRef(key0);
refs = await git3.listRefs();
expect(refs[0]).to.eql([data2,key2]);
expect(refs[1]).to.eql([data1,key1]);
// check delRef
await git3.delRef(repoName, key0);
refs = await git3.listRefs(repoName);
expect(refs[0]).to.eql([data2, concatHexStr(repoName, key2)]);
expect(refs[1]).to.eql([data1, concatHexStr(repoName, key1)]);
expect(refs.length).to.eql(2);
await git3.delRef(key1);
refs = await git3.listRefs();
expect(refs[0]).to.eql([data2,key2]);
await git3.delRef(repoName, key1);
refs = await git3.listRefs(repoName);
expect(refs[0]).to.eql([data2, concatHexStr(repoName, key2)]);
expect(refs.length).to.eql(1);
// check update
let data3 = "0xdddddddddddddddddddddddddddddddddddddddd";
await git3.setRef(key2,data3);
refs = await git3.listRefs();
expect(refs[0]).to.eql([data3,key2]);
await git3.setRef(repoName, key2, data3);
refs = await git3.listRefs(repoName);
expect(refs[0]).to.eql([data3, concatHexStr(repoName, key2)]);
});
it("Access Control", async function () {
const Git3 = await ethers.getContractFactory("Git3");
const git3 = await Git3.deploy();
await git3.deployed();
let signer;
let user1;
[signer, user1] = await ethers.getSigners();
await git3.connect(signer).createRepo("0x11");
})
await expect(git3.connect(user1).upload("0x11", "0x616263", "0x112233")).to.be.revertedWith("only owner");
await expect(git3.connect(user1).uploadChunk("0x11", "0x616263", 0,"0x112233")).to.be.revertedWith("only owner");
await expect(git3.connect(user1).setRef("0x11", "0x616263", "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")).to.be.revertedWith("only owner");
await git3.connect(signer).upload("0x11", "0x616263", "0x112233");
expect(await git3.download("0x11", "0x616263")).to.eql(["0x112233", true]);
await git3.connect(signer).setRef("0x11", "0x616263", "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
await expect(git3.connect(user1).remove("0x11", "0x616263")).to.be.revertedWith("only owner");
await expect(git3.connect(user1).delRef("0x11", "0x616263")).to.be.revertedWith("only owner");
});
});

@ -1,39 +1,41 @@
import hre from 'hardhat'
import hre from "hardhat";
const { ethers } = hre;
import fs from 'fs'
import fs from "fs";
async function main() {
const accounts = await ethers.getSigners();
console.log(accounts[0].address);
const Git3 = await hre.ethers.getContractAt("Git3", "0xa709975Bc01e745432f8898499E7b9a60f420117")
let storageManager = await Git3.storageManager()
console.log("storageManager", storageManager)
const flat = await hre.ethers.getContractAt("FlatDirectory", storageManager)
let owner = await flat.owner()
console.log("owner", owner)
return
let file = fs.readFileSync("test/git3.png")
let buffer = Array.from(file).slice(0, 24576)
let fileSize = buffer.length
console.log("buffer", buffer.length)
let cost = 0
if (fileSize > 24 * 1024 - 326) {
cost = Math.floor((fileSize + 326) / 1024 / 24)
}
let key = ethers.utils.toUtf8Bytes("aaa")
let rept = await Git3.upload(key, buffer, { value: ethers.utils.parseEther(cost.toString()) })
console.log("rept", "https://explorer.galileo.web3q.io/tx/" + rept.hash)
const accounts = await ethers.getSigners();
console.log(accounts[0].address);
const Git3 = await hre.ethers.getContractAt(
"Git3",
"0xa709975Bc01e745432f8898499E7b9a60f420117"
);
let storageManager = await Git3.storageManager();
console.log("storageManager", storageManager);
const flat = await hre.ethers.getContractAt("FlatDirectory", storageManager);
let owner = await flat.owner();
console.log("owner", owner);
return;
let file = fs.readFileSync("test/git3.png");
let buffer = Array.from(file).slice(0, 24576);
let fileSize = buffer.length;
console.log("buffer", buffer.length);
let cost = 0;
if (fileSize > 24 * 1024 - 326) {
cost = Math.floor((fileSize + 326) / 1024 / 24);
}
let key = ethers.utils.toUtf8Bytes("aaa");
let rept = await Git3.upload(key, buffer, {
value: ethers.utils.parseEther(cost.toString()),
});
console.log("rept", "https://explorer.galileo.web3q.io/tx/" + rept.hash);
}
main().catch((error) => {
console.error(error);
process.exit(1);
});
console.error(error);
process.exit(1);
});
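
A hedged worked example of the cost heuristic above (assumption: roughly one token per 24KB chunk plus about 326 bytes of overhead on the W3Q chain):

// For the 24576-byte buffer sliced from test/git3.png:
const fileSize = 24576;
let cost = 0;
if (fileSize > 24 * 1024 - 326) {                   // threshold: 24250 bytes
  cost = Math.floor((fileSize + 326) / 1024 / 24);  // 24902 / 24576 ≈ 1.01 -> 1
}
console.log(cost);                                  // 1, so 1 ether is attached to upload()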
