mirror of git3://git3.w3q/git3-contract
parent 200693d07b
commit 34b779ff30
@@ -0,0 +1,11 @@
node_modules
.env
coverage
coverage.json
typechain
typechain-types

# Hardhat files
cache
artifacts
@@ -0,0 +1,13 @@
# Sample Hardhat Project

This project demonstrates a basic Hardhat use case. It comes with a sample contract, a test for that contract, and a script that deploys that contract.

Try running some of the following tasks:

```shell
npx hardhat help
npx hardhat test
REPORT_GAS=true npx hardhat test
npx hardhat node
npx hardhat run scripts/deploy.ts
```
@@ -0,0 +1,139 @@
// SPDX-License-Identifier: Unlicense
pragma solidity ^0.8.0;

import "./IFileOperator.sol";
import "evm-large-storage/contracts/examples/FlatDirectory.sol";

contract Git3 {
    uint256 constant REF_HASH_LEN = 40; // a ref hash is 40 bytes (the length of a hex-encoded SHA-1)
    IFileOperator public immutable storageManager;

    constructor() {
        storageManager = IFileOperator(address(new FlatDirectory(220)));
    }

    // download(path: string): Promise<[Status, Buffer]> // objects/3e/3432eac32.../
    function download(bytes memory path) external view returns (bytes memory, bool) {
        // delegate the read to the FlatDirectory (FD)
        return storageManager.read(path);
    }

    // upload(path: string, file: Buffer): Promise<Status>
    function upload(bytes memory path, bytes memory data) external payable {
        storageManager.writeChunk(path, 0, data);
    }

    function uploadChunk(bytes memory path, uint256 chunkId, bytes memory data) external payable {
        storageManager.writeChunk(path, chunkId, data);
    }

    // delete(path: string): Promise<Status>
    function remove(bytes memory path) external {
        // removing a file removes all of its chunks
        storageManager.remove(path);
    }

    function size(bytes memory name) external view returns (uint256, uint256) {
        return storageManager.size(name);
    }

    function countChunks(bytes memory name) external view returns (uint256) {
        return storageManager.countChunks(name);
    }

    /*
        The storage layout is as follows:
        slot n   = [hash1]
        slot n+1 = [hash2, index]
    */
    struct refInfo {
        bytes32 hash1;
        bytes8 hash2;
        uint192 index; // 24 bytes * 8 bits = 192, so hash2 and index pack into one slot
    }
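
    // Example: for a 40-byte ref hash, hash1 holds bytes 0..31 (slot n), while
    // hash2 holds bytes 32..39 and is packed next to the 24-byte index (slot n+1).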

    struct refData {
        bytes hash;
        string name;
    }

    mapping(string => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1}
    string[] public refs; // e.g. [main, dev, test, staging]

    function _setRefInfo(refInfo storage ref, bytes memory hash, uint192 index) internal {
        require(hash.length == REF_HASH_LEN, "Incorrect RefHash Length");
        bytes32 hash1;
        bytes32 hash2;

        assembly {
            // bytes 0..31 of the hash payload
            hash1 := mload(add(hash, 0x20))
            // bytes 32..63; only the leading 8 bytes are kept below
            hash2 := mload(add(hash, 0x40))
        }

        ref.hash1 = hash1;
        ref.hash2 = bytes8(hash2);
        ref.index = index;
    }

    // listRefs(): Promise<Ref[]>
    function _convertRefInfo(refInfo storage info) internal view returns (refData memory res) {
        bytes memory hash = new bytes(REF_HASH_LEN);

        // sload hash1 and hash2, then splice them back into one 40-byte buffer
        bytes32 hash1 = info.hash1;
        bytes8 hash2 = info.hash2;
        assembly {
            mstore(add(hash, 0x20), hash1)
            mstore(add(hash, 0x40), hash2)
        }
        res.hash = hash;
        res.name = refs[info.index];
    }

    function listRefs() public view returns (refData[] memory list) {
        list = new refData[](refs.length);
        for (uint256 index = 0; index < refs.length; index++) {
            list[index] = _convertRefInfo(nameToRefInfo[refs[index]]);
        }
    }

    // setRef(path: string, sha: string): Promise<Status>
    function setRef(string memory name, bytes memory refHash) public {
        refInfo memory srs = nameToRefInfo[name];

        if (srs.hash1 == bytes32(0) && srs.hash2 == bytes8(0)) {
            // store the refHash for the first time
            require(refs.length <= uint256(type(uint192).max), "refs exceed valid length");

            _setRefInfo(nameToRefInfo[name], refHash, uint192(refs.length));
            refs.push(name);
        } else {
            // only update the refHash
            _setRefInfo(nameToRefInfo[name], refHash, srs.index);
        }
    }

    // delRef(path: string): Promise<Status>
    function delRef(string memory name) public {
        refInfo memory srs = nameToRefInfo[name];

        require(srs.hash1 != bytes32(0) || srs.hash2 != bytes8(0), "Reference of this name does not exist");

        // swap-and-pop: move the last ref into the vacated slot, then shrink the array
        refs[srs.index] = refs[refs.length - 1];
        nameToRefInfo[refs[refs.length - 1]].index = srs.index;
        refs.pop();
        delete nameToRefInfo[name];
    }
}
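The TypeScript-style signatures in the comments above sketch the client API this contract is meant to back. A minimal usage sketch with the ethers v5 + Hardhat toolchain from this commit (the path `0x616263`, ref name, and hash are example values mirroring the tests below):

```typescript
import { ethers } from "hardhat";

async function main() {
  const Git3 = await ethers.getContractFactory("Git3");
  const git3 = await Git3.deploy();
  await git3.deployed();

  // Object storage: write a blob under a path key, then read it back.
  await git3.upload("0x616263", "0x112233");
  const [data, ok] = await git3.download("0x616263"); // ["0x112233", true]

  // Refs: point a name at a 40-byte hash, then enumerate all refs.
  const refHash = "0x" + "ab".repeat(40); // 40 bytes = 80 hex chars
  await git3.setRef("main", refHash);
  console.log(await git3.listRefs(), data, ok);
}

main().catch(console.error);
```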
@@ -0,0 +1,37 @@
// SPDX-License-Identifier: Unlicense
pragma solidity ^0.8.0;

interface IFileOperator {
    // Large-storage methods
    function write(bytes memory name, bytes memory data) external payable;

    function read(bytes memory name) external view returns (bytes memory, bool);

    // returns (size, number of chunks)
    function size(bytes memory name) external view returns (uint256, uint256);

    function remove(bytes memory name) external returns (uint256);

    function countChunks(bytes memory name) external view returns (uint256);

    // Chunk-based large-storage methods
    function writeChunk(
        bytes memory name,
        uint256 chunkId,
        bytes memory data
    ) external payable;

    function readChunk(bytes memory name, uint256 chunkId) external view returns (bytes memory, bool);

    function chunkSize(bytes memory name, uint256 chunkId) external view returns (uint256, bool);

    function removeChunk(bytes memory name, uint256 chunkId) external returns (bool);

    function truncate(bytes memory name, uint256 chunkId) external returns (uint256);

    function refund() external;

    function destruct() external;

    function getChunkHash(bytes memory name, uint256 chunkId) external view returns (bytes32);
}
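Chunked writes let a client split a large object across several transactions; `download` stitches the chunks back together in order (this concatenation behavior is exercised by the test file below). A minimal sketch via the Git3 wrapper, with example path and payloads:

```typescript
import { ethers } from "hardhat";

async function main() {
  const git3 = await (await ethers.getContractFactory("Git3")).deploy();
  await git3.deployed();

  // Two sequential chunks under one path key...
  await git3.uploadChunk("0x616263", 0, "0x1122");
  await git3.uploadChunk("0x616263", 1, "0x3344");

  // ...come back concatenated on read.
  const [data] = await git3.download("0x616263"); // "0x11223344"
  console.log(data);
}

main().catch(console.error);
```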
@@ -0,0 +1,8 @@
import { HardhatUserConfig } from "hardhat/config";
import "@nomicfoundation/hardhat-toolbox";

const config: HardhatUserConfig = {
  solidity: "0.8.17",
};

export default config;
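The `REPORT_GAS=true` invocation in the README works because `@nomicfoundation/hardhat-toolbox` bundles `hardhat-gas-reporter`. A sketch that makes the flag explicit in the config (optional; assuming the toolbox's bundled reporter):

```typescript
import { HardhatUserConfig } from "hardhat/config";
import "@nomicfoundation/hardhat-toolbox";

const config: HardhatUserConfig = {
  solidity: "0.8.17",
  gasReporter: {
    // run the gas reporter only when REPORT_GAS is set, as in the README
    enabled: process.env.REPORT_GAS !== undefined,
  },
};

export default config;
```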
File diff suppressed because it is too large
@@ -0,0 +1,27 @@
{
  "name": "dgithub",
  "version": "1.0.0",
  "description": "Decentralized GitHub",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/cyl19970726/dGithub.git"
  },
  "author": "",
  "license": "ISC",
  "bugs": {
    "url": "https://github.com/cyl19970726/dGithub/issues"
  },
  "homepage": "https://github.com/cyl19970726/dGithub#readme",
  "devDependencies": {
    "@nomicfoundation/hardhat-toolbox": "^2.0.0",
    "hardhat": "^2.12.4"
  },
  "dependencies": {
    "@openzeppelin/contracts": "^4.8.0",
    "evm-large-storage": "^1.0.0"
  }
}
@@ -0,0 +1,23 @@
import { ethers } from "hardhat";

async function main() {
  const currentTimestampInSeconds = Math.round(Date.now() / 1000);
  const ONE_YEAR_IN_SECS = 365 * 24 * 60 * 60;
  const unlockTime = currentTimestampInSeconds + ONE_YEAR_IN_SECS;

  const lockedAmount = ethers.utils.parseEther("1");

  const Lock = await ethers.getContractFactory("Lock");
  const lock = await Lock.deploy(unlockTime, { value: lockedAmount });

  await lock.deployed();

  console.log(`Lock with 1 ETH and unlock timestamp ${unlockTime} deployed to ${lock.address}`);
}

// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
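Note that this is the stock Hardhat sample script: it deploys the sample `Lock` contract, not `Git3`. A Git3 deployment script in the same style (a hypothetical `scripts/deployGit3.ts`, not part of this commit) might look like:

```typescript
import { ethers } from "hardhat";

async function main() {
  // Git3's constructor deploys its own backing FlatDirectory, so no arguments are needed.
  const Git3 = await ethers.getContractFactory("Git3");
  const git3 = await Git3.deploy();
  await git3.deployed();

  console.log(`Git3 deployed to ${git3.address}`);
}

main().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```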
@@ -0,0 +1,100 @@
const { expect } = require("chai");
const { ethers } = require("hardhat");

const ToBig = (x) => ethers.BigNumber.from(x);

describe("Git3 Test", function () {
  it("upload/download/remove", async function () {
    const Git3 = await ethers.getContractFactory("Git3");
    const git3 = await Git3.deploy();
    await git3.deployed();

    await git3.upload("0x616263", "0x112233");
    expect(await git3.download("0x616263")).to.eql(["0x112233", true]);

    let data = Array.from({ length: 40 }, () =>
      Math.floor(Math.random() * 256)
    );

    await git3.upload("0x616263", data);
    expect(await git3.download("0x616263")).to.eql([
      ethers.utils.hexlify(data),
      true,
    ]);

    expect(await git3.size("0x616263")).to.eql([ToBig(40), ToBig(1)]);

    await git3.remove("0x616263");
    expect(await git3.size("0x616263")).to.eql([ToBig(0), ToBig(0)]);
  });

  it("upload/download/remove chunks", async function () {
    const Git3 = await ethers.getContractFactory("Git3");
    const git3 = await Git3.deploy();
    await git3.deployed();

    expect(await git3.countChunks("0x616263")).to.eql(ToBig(0));

    let data0 = Array.from({ length: 10 }, () =>
      Math.floor(Math.random() * 256)
    );
    await git3.uploadChunk("0x616263", 0, data0);
    expect(await git3.download("0x616263")).to.eql([
      ethers.utils.hexlify(data0),
      true,
    ]);

    let data1 = Array.from({ length: 20 }, () =>
      Math.floor(Math.random() * 256)
    );
    await git3.uploadChunk("0x616263", 1, data1);
    expect(await git3.download("0x616263")).to.eql([
      ethers.utils.hexlify(data0.concat(data1)),
      true,
    ]);

    await git3.remove("0x616263"); // should succeed
    expect(await git3.size("0x616263")).to.eql([ToBig(0), ToBig(0)]);
    expect(await git3.download("0x616263")).to.eql(["0x", false]);
    expect(await git3.countChunks("0x616263")).to.eql(ToBig(0));
  });

  it("set/list Ref", async function () {
    const Git3 = await ethers.getContractFactory("Git3");
    const git3 = await Git3.deploy();
    await git3.deployed();

    let key0 = "0x616263";
    let data0 =
      "0xaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabb";
    await git3.setRef(key0, data0);

    let key1 = "0x717273";
    let data1 =
      "0x1111aabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaaaaaaaaaaaa";
    await git3.setRef(key1, data1);

    let key2 = "0x818283";
    let data2 =
      "0x777777777777777baabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabbaabba88888888888";
    await git3.setRef(key2, data2);

    let refs = await git3.listRefs();
    expect(refs[0]).to.eql([data0, key0]);
    expect(refs[1]).to.eql([data1, key1]);
    expect(refs[2]).to.eql([data2, key2]);

    // check delRef
    await git3.delRef(key0);
    refs = await git3.listRefs();
    expect(refs[0]).to.eql([data2, key2]);
    expect(refs[1]).to.eql([data1, key1]);
  });
});
@@ -0,0 +1,10 @@
{
  "compilerOptions": {
    "target": "es2020",
    "module": "commonjs",
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "skipLibCheck": true
  }
}