main · cyhhao · 2 years ago
parent 9b0f0a5473 · commit e4f885a3ac

.gitignore

@@ -1,3 +1,5 @@
+.DS_Store
+Thumbs.db
 node_modules
 .env
 coverage

@@ -8,4 +10,6 @@ typechain-types
 # Hardhat files
 cache
 artifacts
+.vscode/
+local.config.ts

README.md

@@ -2,7 +2,7 @@
 - RPC : https://galileo.web3q.io:8545
 - ChainId : 3334
-- Git3 Contract Address: 0x981cf56258Af8B6470642cBf1f991980cAa5DBf3
+- Git3 Contract Address: 0x680336910D9357F6aDf26c0d61eAB8e65998Ab2d

 # Sample Hardhat Project

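For reference, a minimal connection sketch against the endpoint and address listed above, assuming ethers v5 (the version this repo's scripts use) and a hand-reduced one-function ABI; the full ABI lives in the compiled artifact:

```ts
// Sketch: attach to the deployed Git3 contract on Web3Q Galileo.
// The ABI fragment is hand-reduced from the contract below (assumption).
import { ethers } from "ethers";

const provider = new ethers.providers.JsonRpcProvider(
  "https://galileo.web3q.io:8545",
  { name: "w3q-galileo", chainId: 3334 }
);

const git3 = new ethers.Contract(
  "0x680336910D9357F6aDf26c0d61eAB8e65998Ab2d",
  ["function listRefs() view returns (tuple(bytes20 hash, string name)[])"],
  provider
);

git3.listRefs().then(console.log); // prints the registered refs
```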
contracts/Git3.sol

@@ -5,100 +5,114 @@ import "hardhat/console.sol";
 import "./IFileOperator.sol";
 import "@openzeppelin/contracts/access/Ownable.sol";
 import "evm-large-storage/contracts/examples/FlatDirectory.sol";
 // import "evm-large-storage/contracts/W3RC3.sol";

 contract Git3 {
-    IFileOperator public immutable storageManager;
-
-    struct refInfo {
-        bytes20 hash;
-        uint96 index;
-    }
-
-    struct refData {
-        bytes20 hash;
-        string name;
-    }
-
-    mapping (string => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1 }
-    string[] public refs; // [main, dev, test, staging]
-
-    function _convertRefInfo(refInfo memory info) internal view returns(refData memory res){
-        res.hash = info.hash;
-        res.name = refs[info.index];
-    }
-
-    constructor() {
-        storageManager = IFileOperator(address(new FlatDirectory(0)));
-    }
-
-    function download(bytes memory path) external view returns (bytes memory, bool) {
-        // call flat directory (FD)
-        return storageManager.read(path);
-    }
-
-    function upload(bytes memory path, bytes memory data) external payable {
-        storageManager.writeChunk(path, 0, data);
-    }
-
-    function uploadChunk(bytes memory path, uint256 chunkId, bytes memory data) external payable {
-        storageManager.writeChunk(path, chunkId, data);
-    }
-
-    function remove(bytes memory path) external {
-        // The actual remove process deletes all of the file's chunks
-        storageManager.remove(path);
-    }
-
-    function size(bytes memory name) external view returns (uint256, uint256) {
-        return storageManager.size(name);
-    }
-
-    function countChunks(bytes memory name) external view returns (uint256) {
-        return storageManager.countChunks(name);
-    }
-
-    function listRefs() public view returns (refData[] memory list) {
-        list = new refData[](refs.length);
-        for (uint index = 0; index < refs.length; index++) {
-            list[index] = _convertRefInfo(nameToRefInfo[refs[index]]);
-        }
-    }
-
-    function setRef(string memory name, bytes20 refHash) public {
-        // only execute `sload` once to reduce gas consumption
-        refInfo memory srs;
-        srs = nameToRefInfo[name];
-        uint256 refsLen = refs.length;
-
-        if (srs.hash == bytes20(0)) {
-            // store refHash for the first time
-            require(refsLen <= uint256(uint96(int96(-1))), "Refs exceed valid length");
-
-            nameToRefInfo[name].hash = refHash;
-            nameToRefInfo[name].index = uint96(refsLen);
-
-            refs.push(name);
-        } else {
-            // only update refHash
-            nameToRefInfo[name].hash = refHash;
-        }
-    }
-
-    function delRef(string memory name) public {
-        // only execute `sload` once to reduce gas consumption
-        refInfo memory srs;
-        srs = nameToRefInfo[name];
-        uint256 refsLen = refs.length;
-
-        require(srs.hash != bytes20(0), "Reference of this name does not exist");
-        require(srs.index < refsLen, "System Error: Invalid index");
-
-        if (srs.index < refsLen - 1) {
-            refs[srs.index] = refs[refsLen - 1];
-            nameToRefInfo[refs[refsLen - 1]].index = srs.index;
-        }
-        refs.pop();
-        delete nameToRefInfo[name];
-    }
+    IFileOperator public immutable storageManager;
+
+    struct refInfo {
+        bytes20 hash;
+        uint96 index;
+    }
+
+    struct refData {
+        bytes20 hash;
+        string name;
+    }
+
+    mapping(string => refInfo) public nameToRefInfo; // dev => {hash: 0x1234..., index: 1 }
+    string[] public refs; // [main, dev, test, staging]
+
+    function _convertRefInfo(
+        refInfo memory info
+    ) internal view returns (refData memory res) {
+        res.hash = info.hash;
+        res.name = refs[info.index];
+    }
+
+    constructor() {
+        storageManager = IFileOperator(address(new FlatDirectory(220)));
+    }
+
+    function download(
+        bytes memory path
+    ) external view returns (bytes memory, bool) {
+        // call flat directory (FD)
+        return storageManager.read(path);
+    }
+
+    function upload(bytes memory path, bytes memory data) external payable {
+        storageManager.writeChunk{value: msg.value}(path, 0, data);
+    }
+
+    function uploadChunk(
+        bytes memory path,
+        uint256 chunkId,
+        bytes memory data
+    ) external payable {
+        storageManager.writeChunk{value: msg.value}(path, chunkId, data);
+    }
+
+    function remove(bytes memory path) external {
+        // The actual remove process deletes all of the file's chunks
+        storageManager.remove(path);
+    }
+
+    function size(bytes memory name) external view returns (uint256, uint256) {
+        return storageManager.size(name);
+    }
+
+    function countChunks(bytes memory name) external view returns (uint256) {
+        return storageManager.countChunks(name);
+    }
+
+    function listRefs() public view returns (refData[] memory list) {
+        list = new refData[](refs.length);
+        for (uint index = 0; index < refs.length; index++) {
+            list[index] = _convertRefInfo(nameToRefInfo[refs[index]]);
+        }
+    }
+
+    function setRef(string memory name, bytes20 refHash) public {
+        // only execute `sload` once to reduce gas consumption
+        refInfo memory srs;
+        srs = nameToRefInfo[name];
+        uint256 refsLen = refs.length;
+
+        if (srs.hash == bytes20(0)) {
+            // store refHash for the first time
+            require(
+                refsLen <= uint256(uint96(int96(-1))),
+                "Refs exceed valid length"
+            );
+
+            nameToRefInfo[name].hash = refHash;
+            nameToRefInfo[name].index = uint96(refsLen);
+
+            refs.push(name);
+        } else {
+            // only update refHash
+            nameToRefInfo[name].hash = refHash;
+        }
+    }
+
+    function delRef(string memory name) public {
+        // only execute `sload` once to reduce gas consumption
+        refInfo memory srs;
+        srs = nameToRefInfo[name];
+        uint256 refsLen = refs.length;
+
+        require(
+            srs.hash != bytes20(0),
+            "Reference of this name does not exist"
+        );
+        require(srs.index < refsLen, "System Error: Invalid index");
+
+        if (srs.index < refsLen - 1) {
+            refs[srs.index] = refs[refsLen - 1];
+            nameToRefInfo[refs[refsLen - 1]].index = srs.index;
+        }
+        refs.pop();
+        delete nameToRefInfo[name];
+    }
 }
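The functional change in this hunk is that upload and uploadChunk now forward msg.value into FlatDirectory.writeChunk, so the storage fee travels with the write; the ref logic stores each name once in refs and tracks its slot in nameToRefInfo, with delRef doing a swap-and-pop to keep the array dense. A hypothetical Hardhat usage sketch (reusing the contract address that appears in the test script at the end of this commit; the fee value is a placeholder):

```ts
// Sketch only: exercising Git3 from a Hardhat script (ethers v5 style,
// matching this repo's other scripts).
import hre from "hardhat";
const { ethers } = hre;

async function demo() {
  const git3 = await ethers.getContractAt(
    "Git3",
    "0xa709975Bc01e745432f8898499E7b9a60f420117"
  );

  // upload() forwards msg.value to FlatDirectory.writeChunk, paying the
  // storage fee in the same transaction (fee amount here is a placeholder).
  const key = ethers.utils.toUtf8Bytes("refs/heads/main");
  const data = ethers.utils.toUtf8Bytes("hello git3");
  await (await git3.upload(key, data, { value: ethers.utils.parseEther("1") })).wait();

  // setRef: the first write for a name appends it to `refs`;
  // later writes only overwrite the stored hash.
  const h = "0x" + "ab".repeat(20); // placeholder bytes20 commit hash
  await (await git3.setRef("main", h)).wait();
  await (await git3.setRef("dev", h)).wait();

  // delRef swap-and-pops: the last ref moves into the freed slot and its
  // stored index is patched, so `refs` stays dense for listRefs().
  await (await git3.delRef("main")).wait();
  console.log(await git3.listRefs()); // [[0xabab…, "dev"]]
}

demo().catch(console.error);
```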

local.config.ts

@@ -0,0 +1,20 @@
+const NetworkDefinition = {
+  rinkeby: {
+    url: "https://rinkeby.infura.io/v3/*******your-api-key*******",
+    accounts: {
+      mnemonic: "test test test test test test test test test test test junk"
+    }
+  },
+  polygon: {
+    url: "https://polygon.infura.io/v3/*******your-api-key*******",
+    accounts: {
+      mnemonic: "test test test test test test test test test test test junk"
+    }
+  }
+}
+
+const EtherscanConfig = {
+  apiKey: "YOUR_ETHERSCAN_API_KEY"
+}
+
+export { NetworkDefinition, EtherscanConfig }

hardhat.config.ts

@@ -4,15 +4,30 @@ require("dotenv").config();
 import { HardhatUserConfig } from "hardhat/config";
 import "@nomicfoundation/hardhat-toolbox";
+import { NetworkDefinition, EtherscanConfig } from './local.config';

 const config: HardhatUserConfig = {
-  solidity: "0.8.17",
+  solidity: {
+    compilers: [
+      {
+        version: "0.8.17",
+        settings: {
+          optimizer: {
+            enabled: true,
+            runs: 1000
+          }
+        }
+      }
+    ],
+  },
   networks: {
     w3qGalileo: {
       url: "https://galileo.web3q.io:8545",
       accounts: process.env.PRIVATE_KEY !== undefined ? [process.env.PRIVATE_KEY] : [],
-    }
-  }
+    },
+    ...NetworkDefinition
+  },
+  etherscan: EtherscanConfig,
 };

 export default config;
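The spread merges the private, gitignored networks from local.config.ts into the committed config alongside w3qGalileo. Roughly, as a sketch with abbreviated values (assuming the local.config.ts shown above):

```ts
// What `networks` resolves to after the spread (values abbreviated, not real keys).
const networks = {
  w3qGalileo: {
    url: "https://galileo.web3q.io:8545",
    accounts: ["<PRIVATE_KEY from .env>"],
  },
  rinkeby: {
    url: "https://rinkeby.infura.io/v3/<your-api-key>",
    accounts: { mnemonic: "test test … junk" },
  },
  polygon: {
    url: "https://polygon.infura.io/v3/<your-api-key>",
    accounts: { mnemonic: "test test … junk" },
  },
};
```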

scripts/deploy.ts

@@ -4,7 +4,9 @@ async function main() {
   const Git3 = await ethers.getContractFactory("Git3");
   const git3 = await Git3.deploy();
   let receipt = await git3.deployed();
-  console.log(receipt);
+  console.log(git3.address);
 }

 // We recommend this pattern to be able to use async/await everywhere

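With the deploy script now logging just the contract address instead of the full contract object, a standard Hardhat invocation such as `npx hardhat run scripts/deploy.ts --network w3qGalileo` (script path assumed) prints exactly the address that the README hunk above records.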
test/git3.png

Binary file not shown (PNG image, 185 KiB).

@@ -0,0 +1,39 @@
+import hre from 'hardhat'
+const { ethers } = hre;
+import fs from 'fs'
+
+async function main() {
+  const accounts = await ethers.getSigners();
+  console.log(accounts[0].address);
+
+  const Git3 = await hre.ethers.getContractAt("Git3", "0xa709975Bc01e745432f8898499E7b9a60f420117")
+
+  let storageManager = await Git3.storageManager()
+  console.log("storageManager", storageManager)
+
+  const flat = await hre.ethers.getContractAt("FlatDirectory", storageManager)
+  let owner = await flat.owner()
+  console.log("owner", owner)
+
+  return
+
+  let file = fs.readFileSync("test/git3.png")
+  let buffer = Array.from(file).slice(0, 24576)
+  let fileSize = buffer.length
+  console.log("buffer", buffer.length)
+
+  let cost = 0
+  if (fileSize > 24 * 1024 - 326) {
+    cost = Math.floor((fileSize + 326) / 1024 / 24)
+  }
+
+  let key = ethers.utils.toUtf8Bytes("aaa")
+  let rept = await Git3.upload(key, buffer, { value: ethers.utils.parseEther(cost.toString()) })
+  console.log("rept", "https://explorer.galileo.web3q.io/tx/" + rept.hash)
+}
+
+main().catch((error) => {
+  console.error(error);
+  process.exit(1);
+});
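Two notes on this script. First, the bare `return` after the owner check makes everything below it unreachable as committed, so the upload section never actually runs; it reads like a debugging stop left in place. Second, the fee arithmetic factored out as a sketch; the 24 KiB chunk size and 326-byte overhead are read off the committed constants and are assumptions about FlatDirectory's pricing, not documented anywhere in this diff:

```ts
// Sketch of the fee heuristic used above: files larger than one 24 KiB
// chunk (less an assumed 326-byte per-file overhead) pay one token per
// full 24 KiB chunk, matching Math.floor((fileSize + 326) / 1024 / 24).
const CHUNK_BYTES = 24 * 1024;
const OVERHEAD_BYTES = 326; // assumption: metadata charged against the first chunk

function uploadCostTokens(fileSize: number): number {
  if (fileSize <= CHUNK_BYTES - OVERHEAD_BYTES) return 0;
  return Math.floor((fileSize + OVERHEAD_BYTES) / CHUNK_BYTES);
}

// e.g. the 24,576-byte buffer sliced above:
// Math.floor((24576 + 326) / 24576) = 1 token.
console.log(uploadCostTokens(24576)); // 1
```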