Commit 1ad20c5a authored by 董子豪's avatar 董子豪

remove filecoin-ffi

parent 95b26cd2
...@@ -11,6 +11,7 @@ import ( ...@@ -11,6 +11,7 @@ import (
"fil_integrate/build/state-types/abi" "fil_integrate/build/state-types/abi"
"fil_integrate/build/storage" "fil_integrate/build/storage"
"fil_integrate/seal" "fil_integrate/seal"
"fil_integrate/seal/basicpiece"
) )
type User struct { type User struct {
...@@ -113,7 +114,7 @@ func (u *User) ReadPieceRange( ...@@ -113,7 +114,7 @@ func (u *User) ReadPieceRange(
return nil return nil
} }
func (u *User) getPiece(ctx context.Context, pieceCommit cid.Commit) (*storage.DecodedData, error) { func (u *User) getPiece(ctx context.Context, pieceCommit cid.Commit) (*basicpiece.DecodedData, error) {
// todo: GET from chian/provider // todo: GET from chian/provider
// miner, ok := cid2sidMap[pieceCommit] // miner, ok := cid2sidMap[pieceCommit]
buf, err := u.GetPieceFromProvider(ctx, 10000, pieceCommit) buf, err := u.GetPieceFromProvider(ctx, 10000, pieceCommit)
......
#!/bin/bash #!/bin/bash
rm -f bench rm -f bench test
cd extern/filecoin-ffi cd extern/filecoin-ffi
make clean make clean
FFI_BUILD_FROM_SOURCE=1 FFI_USE_BLST=1 make env RUSTFLAGS="-C target-cpu=native -g" FFI_BUILD_FROM_SOURCE=1 FFI_USE_BLST=1 make
cd ../.. cd ../..
env RUSTFLAGS="-C target-cpu=native -g" go build -o bench ./cmd/bench/main.go go build -o bench ./cmd/bench/main.go
\ No newline at end of file go build -o test ./cmd/actor-test/main.go
\ No newline at end of file
...@@ -9,3 +9,7 @@ var Undef = Commit{} ...@@ -9,3 +9,7 @@ var Undef = Commit{}
func (c Commit) Bytes() []byte { func (c Commit) Bytes() []byte {
return c[:] return c[:]
} }
// Equals reports whether c and x hold identical commitment bytes.
func (c Commit) Equals(x Commit) bool {
	return c == x
}
package storage package storage
import ( import (
"bytes"
"encoding/binary"
"io" "io"
"golang.org/x/xerrors"
"fil_integrate/build/state-types/abi" "fil_integrate/build/state-types/abi"
"fil_integrate/build/cid" "fil_integrate/build/cid"
) )
...@@ -41,97 +37,3 @@ type Piece struct { ...@@ -41,97 +37,3 @@ type Piece struct {
Commitment cid.Commit Commitment cid.Commit
Size abi.UnpaddedPieceSize Size abi.UnpaddedPieceSize
} }
// DecodedData is one decoded chunk of piece data. A chunk either carries
// user bytes in Data, or — when HasPre is set — chains to a previous piece
// via PrePieceCommit instead. CommitData holds trailing 32-byte commitments
// in wire form; PieceCommit is the same commitments parsed out by
// Deserialize. See Serialize for the exact buffer layout.
type DecodedData struct {
	HasPre         bool
	PrePieceCommit cid.Commit

	Data []byte

	PieceCommit []cid.Commit
	CommitData  []byte
}
// Serialize encodes data into a buffer whose size is rounded up by
// nextUppandedPowerOfTwo.
//
// Layout when HasPre is set (flagged by the high bit of the first word;
// Data must then be empty):
//
//	[0:4)   0x80000000 (HasPre flag, MetaLen = 0)
//	[4:8)   big-endian CommitData length
//	[8:40)  PrePieceCommit
//	[40:)   CommitData
//
// Layout otherwise:
//
//	[0:4)          big-endian Data length
//	[4:8)          big-endian CommitData length
//	[8:8+MetaLen)  Data
//	[8+MetaLen:)   CommitData
func (data *DecodedData) Serialize() ([]byte, error) {
	var buf []byte
	MetaLen := uint32(len(data.Data))
	CommLen := uint32(len(data.CommitData))
	if data.HasPre {
		if MetaLen > 0 {
			// Previously returned an empty error message, which made this
			// failure impossible to diagnose from logs.
			return nil, xerrors.Errorf("can't serialize: HasPre is set but Data is non-empty (%d bytes)", MetaLen)
		}
		buf = make([]byte, nextUppandedPowerOfTwo(40+CommLen))
		// High bit marks the presence of a pre-piece commitment.
		binary.BigEndian.PutUint32(buf[:4], 0x80000000)
		binary.BigEndian.PutUint32(buf[4:8], CommLen)
		copy(buf[8:40], data.PrePieceCommit[:])
		copy(buf[40:], data.CommitData[:])
	} else {
		buf = make([]byte, nextUppandedPowerOfTwo(8+MetaLen+CommLen))
		binary.BigEndian.PutUint32(buf[:4], MetaLen)
		binary.BigEndian.PutUint32(buf[4:8], CommLen)
		copy(buf[8:8+MetaLen], data.Data[:])
		copy(buf[8+MetaLen:], data.CommitData[:])
	}
	return buf, nil
}
// Deserialize parses a buffer produced by Serialize back into data,
// tolerating buffers truncated before the full CommitData section.
// The slices stored in data alias buf; callers must not mutate buf
// afterwards if they keep data.
func (data *DecodedData) Deserialize(buf []byte) error {
	var err error
	var MetaLen uint32
	var CommLen uint32
	read := len(buf)
	if read < 8 {
		return xerrors.Errorf("can't deserialize the data less than 8 bytes")
	}
	// The length-word reads can only fail on a short buffer, which the check
	// above rules out — but surface any error instead of discarding it as
	// the previous version did.
	if err = binary.Read(bytes.NewReader(buf[0:4]), binary.BigEndian, &MetaLen); err != nil {
		return err
	}
	if err = binary.Read(bytes.NewReader(buf[4:8]), binary.BigEndian, &CommLen); err != nil {
		return err
	}
	// High bit of the first word is the HasPre flag; the rest is the length.
	data.HasPre = (MetaLen >> 31) != 0
	MetaLen = MetaLen & 0x7fffffff
	rbuf := buf[8:read]
	if data.HasPre {
		if read < 40 {
			return xerrors.Errorf("can't read the pre-piece-hash")
		}
		copy(data.PrePieceCommit[:], buf[8:40])
		rbuf = rbuf[32:]
	}
	// Split the remainder into Data plus trailing 32-byte commitments,
	// clamping to whatever actually fits in the buffer.
	if uint32(len(rbuf)) <= MetaLen {
		data.Data = rbuf[:]
	} else if uint32(len(rbuf)) <= CommLen+MetaLen {
		data.Data = rbuf[:MetaLen]
		data.PieceCommit, err = to32ByteHash(rbuf[MetaLen:])
		if err != nil {
			return err
		}
	} else {
		data.Data = rbuf[:MetaLen]
		data.PieceCommit, err = to32ByteHash(rbuf[MetaLen : CommLen+MetaLen])
		if err != nil {
			return err
		}
	}
	return nil
}
// to32ByteHash splits a flat byte buffer into consecutive 32-byte
// commitments. The input length must be a multiple of 32.
func to32ByteHash(in []byte) ([]cid.Commit, error) {
	if len(in)%32 != 0 {
		// Fixed typo in the error message ("lenth" -> "length").
		return nil, xerrors.Errorf("length of the hash arr must be multiple of 32")
	}
	hash := make([]cid.Commit, len(in)/32)
	for index := 0; index < len(hash); index++ {
		copy(hash[index][:], in[index*32:index*32+32])
	}
	return hash, nil
}
// nextUppandedPowerOfTwo rounds index up to the smallest unpadded size of
// the form 254 * 2^power that can hold index bytes.
func nextUppandedPowerOfTwo(index uint32) abi.UnpaddedPieceSize {
	// Count how many doublings of 254 are needed to cover index bytes.
	power := 0
	for rest := (index - 1) / 254; rest != 0; rest >>= 1 {
		power++
	}
	return abi.UnpaddedPieceSize(254 * (1 << power))
}
This diff is collapsed.
**/*.rs.bk
**/include
**/paramcache
**/target
.install-filcrypto
filcrypto.h
filcrypto.pc
filecoin.h
filecoin.pc
*.a
simulator
This diff is collapsed.
Copyright (c) 2018 Filecoin Project
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
\ No newline at end of file
DEPS:=filcrypto.h filcrypto.pc libfilcrypto.a
all: $(DEPS)
.PHONY: all
# Create a file so that parallel make doesn't call `./install-filcrypto` for
# each of the deps
$(DEPS): .install-filcrypto ;
.install-filcrypto: rust
go clean -cache -testcache .
bash ./install-filcrypto
@touch $@
clean:
go clean -cache -testcache .
rm -rf $(DEPS) .install-filcrypto
rm -f ./runner
cd rust && cargo clean && cd ..
.PHONY: clean
go-lint: $(DEPS)
golangci-lint run -v --concurrency 2 --new-from-rev origin/master
.PHONY: go-lint
shellcheck:
shellcheck install-filcrypto
lint: shellcheck go-lint
cgo-leakdetect: runner
valgrind --leak-check=full --show-leak-kinds=definite ./runner
.PHONY: cgo-leakdetect
cgo-gen: $(DEPS)
go run github.com/xlab/c-for-go --nostamp filcrypto.yml
.PHONY: cgo-gen
runner: $(DEPS)
rm -f ./runner
go build -o ./runner ./cgoleakdetect/
.PHONY: runner
# Filecoin Proofs FFI
> C and CGO bindings for Filecoin's Rust libraries
## Building
To build and install libfilcrypto, its header file and pkg-config manifest, run:
```shell
make
```
To optionally authenticate with GitHub for asset downloads (to increase API rate limits),
set `GITHUB_TOKEN` to a personal access token.
If no precompiled static library is available for your operating system, the
build tooling will attempt to compile a static library from local Rust sources.
### Installation notes
By default, building this will download a pre-built binary of the ffi. The advantages for downloading it are faster build times, and not requiring a rust toolchain and build environment.
The disadvantage to downloading the pre-built binary is that it will not be optimized for your specific hardware. This means that if raw performance is of utmost importance to you, it's highly recommended that you build from source.
### Building from Source
To opt out of downloading precompiled assets, set `FFI_BUILD_FROM_SOURCE=1`:
```shell
rm .install-filcrypto \
; make clean \
; FFI_BUILD_FROM_SOURCE=1 make
```
To allow portable building of the `blst` dependency, set `FFI_USE_BLST_PORTABLE=1`:
```shell
rm .install-filcrypto \
; make clean \
; FFI_BUILD_FROM_SOURCE=1 FFI_USE_BLST_PORTABLE=1 make
```
By default, a 'gpu' option is used in the proofs library. This feature is also used in FFI unless explicitly disabled. To disable building with the 'gpu' dependency, set `FFI_USE_GPU=0`:
```shell
rm .install-filcrypto \
; make clean \
; FFI_BUILD_FROM_SOURCE=1 FFI_USE_BLST=1 FFI_USE_GPU=0 make
```
By default, a 'multicore-sdr' option is used in the proofs library. This feature is also used in FFI unless explicitly disabled. To disable building with the 'multicore-sdr' dependency, set `FFI_USE_MULTICORE_SDR=0`:
```shell
rm .install-filcrypto \
; make clean \
; FFI_BUILD_FROM_SOURCE=1 FFI_USE_MULTICORE_SDR=0 make
```
## Updating rust-fil-proofs (via rust-filecoin-proofs-api)
If rust-fil-proofs has changed from commit X to Y and you wish to get Y into
the filecoin-ffi project, you need to do a few things:
1. Update the rust-filecoin-proofs-api [Cargo.toml][1] file to point to Y
2. Run `cd rust && cargo update -p "filecoin-proofs-api"` from the root of the filecoin-ffi project
3. After the previous step alters your Cargo.lock file, commit and push
## go get
`go get` needs some additional steps in order to work as expected.
Get the source, add this repo as a submodule to your repo, build it and point to it:
```shell
$ go get github.com/filecoin-project/filecoin-ffi
$ git submodule add https://github.com/filecoin-project/filecoin-ffi.git extern/filecoin-ffi
$ make -C extern/filecoin-ffi
$ go mod edit -replace=github.com/filecoin-project/filecoin-ffi=./extern/filecoin-ffi
```
## Updating CGO Bindings
The CGO bindings are generated using [c-for-go](https://github.com/xlab/c-for-go)
and committed to Git. To generate bindings yourself, install the c-for-go
binary, ensure that it's on your path, and then run `make cgo-gen`. CI builds
will fail if generated CGO diverges from what's checked into Git.
## Updating the Changelog
The `mkreleaselog` script (in the project root) can be used to generate a good
portion of the filecoin-ffi changelog. For historical reasons, the script must
be run from the root of a filecoin-ffi checkout which is in your `$GOPATH`.
Run it like so:
```shell
./mkreleaselog v0.25.0 v0.26.0 > /tmp/v0.26.0.notes.txt
```
## License
MIT or Apache 2.0
[1]: https://github.com/filecoin-project/rust-filecoin-proofs-api/commit/61fde0e581cc38abc4e13dbe96145c9ad2f1f0f5
# Security Policy
## Reporting a Vulnerability
For reporting *critical* and *security* bugs, please consult our [Security Policy and Responsible Disclosure Program information](https://github.com/filecoin-project/community/blob/master/SECURITY.md)
## Reporting a non security bug
For non-critical bugs, please simply file a GitHub issue on this repo.
//+build cgo
package ffi
// #cgo LDFLAGS: ${SRCDIR}/libfilcrypto.a
// #cgo pkg-config: ${SRCDIR}/filcrypto.pc
// #include "./filcrypto.h"
import "C"
import (
"github.com/filecoin-project/filecoin-ffi/generated"
)
// Hash computes the digest of a message.
func Hash(message Message) Digest {
	// Call into the native filcrypto library; resp is backed by C memory.
	resp := generated.FilHash(message, uint(len(message)))
	resp.Deref()
	resp.Digest.Deref()
	defer generated.FilDestroyHashResponse(resp)

	// Copy the digest into Go memory before the deferred destroy frees it.
	var out Digest
	copy(out[:], resp.Digest.Inner[:])
	return out
}
// Verify verifies that a signature is the aggregated signature of digests - pubkeys
func Verify(signature *Signature, digests []Digest, publicKeys []PublicKey) bool {
	// Flatten the fixed-size digests into one contiguous byte slice for the FFI call.
	digestBytes := make([]byte, 0, DigestBytes*len(digests))
	for i := range digests {
		digestBytes = append(digestBytes, digests[i][:]...)
	}

	// Flatten the public keys the same way.
	keyBytes := make([]byte, 0, PublicKeyBytes*len(publicKeys))
	for i := range publicKeys {
		keyBytes = append(keyBytes, publicKeys[i][:]...)
	}

	status := generated.FilVerify(signature[:], digestBytes, uint(len(digestBytes)), keyBytes, uint(len(keyBytes)))
	return status > 0
}
// HashVerify verifies that a signature is the aggregated signature of hashed messages.
func HashVerify(signature *Signature, messages []Message, publicKeys []PublicKey) bool {
	// Concatenate the variable-length messages, remembering each one's size
	// so the native side can split them back apart.
	var msgBytes []byte
	msgSizes := make([]uint, 0, len(messages))
	for _, m := range messages {
		msgBytes = append(msgBytes, m...)
		msgSizes = append(msgSizes, uint(len(m)))
	}

	// Flatten the fixed-size public keys into one contiguous slice.
	keyBytes := make([]byte, 0, PublicKeyBytes*len(publicKeys))
	for i := range publicKeys {
		keyBytes = append(keyBytes, publicKeys[i][:]...)
	}

	status := generated.FilHashVerify(signature[:], msgBytes, uint(len(msgBytes)), msgSizes, uint(len(msgSizes)), keyBytes, uint(len(keyBytes)))
	return status > 0
}
// Aggregate aggregates signatures together into a new signature. If the
// provided signatures cannot be aggregated (due to invalid input or an
// operational error), Aggregate will return nil.
func Aggregate(signatures []Signature) *Signature {
	// Flatten the fixed-size signatures into one contiguous byte slice.
	flattenedSignatures := make([]byte, SignatureBytes*len(signatures))
	for idx, sig := range signatures {
		copy(flattenedSignatures[(SignatureBytes*idx):(SignatureBytes*(1+idx))], sig[:])
	}

	resp := generated.FilAggregate(flattenedSignatures, uint(len(flattenedSignatures)))
	// A nil response signals aggregation failure; there is nothing to destroy.
	if resp == nil {
		return nil
	}
	defer generated.FilDestroyAggregateResponse(resp)

	resp.Deref()
	resp.Signature.Deref()

	// Copy the signature into Go memory before the deferred destroy frees it.
	var out Signature
	copy(out[:], resp.Signature.Inner[:])
	return &out
}
// PrivateKeyGenerate generates a private key.
func PrivateKeyGenerate() PrivateKey {
	resp := generated.FilPrivateKeyGenerate()
	resp.Deref()
	resp.PrivateKey.Deref()
	defer generated.FilDestroyPrivateKeyGenerateResponse(resp)

	// Copy the key into Go memory before the deferred destroy frees it.
	var out PrivateKey
	copy(out[:], resp.PrivateKey.Inner[:])
	return out
}
// PrivateKeyGenerateWithSeed generates a private key in a predictable
// manner: the same seed always yields the same key.
func PrivateKeyGenerateWithSeed(seed PrivateKeyGenSeed) PrivateKey {
	// Repackage the seed into the FFI's fixed 32-byte array type.
	var ary generated.Fil32ByteArray
	copy(ary.Inner[:], seed[:])

	resp := generated.FilPrivateKeyGenerateWithSeed(ary)
	resp.Deref()
	resp.PrivateKey.Deref()
	defer generated.FilDestroyPrivateKeyGenerateResponse(resp)

	// Copy the key into Go memory before the deferred destroy frees it.
	var out PrivateKey
	copy(out[:], resp.PrivateKey.Inner[:])
	return out
}
// PrivateKeySign signs a message.
func PrivateKeySign(privateKey PrivateKey, message Message) *Signature {
	resp := generated.FilPrivateKeySign(privateKey[:], message, uint(len(message)))
	resp.Deref()
	resp.Signature.Deref()
	defer generated.FilDestroyPrivateKeySignResponse(resp)

	// Copy the signature into Go memory before the deferred destroy frees it.
	var signature Signature
	copy(signature[:], resp.Signature.Inner[:])
	return &signature
}
// PrivateKeyPublicKey gets the public key for a private key.
func PrivateKeyPublicKey(privateKey PrivateKey) PublicKey {
	resp := generated.FilPrivateKeyPublicKey(privateKey[:])
	resp.Deref()
	resp.PublicKey.Deref()
	defer generated.FilDestroyPrivateKeyPublicKeyResponse(resp)

	// Copy the key into Go memory before the deferred destroy frees it.
	var publicKey PublicKey
	copy(publicKey[:], resp.PublicKey.Inner[:])
	return publicKey
}
// CreateZeroSignature creates a zero signature, used as placeholder in filecoin.
func CreateZeroSignature() Signature {
	resp := generated.FilCreateZeroSignature()
	resp.Deref()
	resp.Signature.Deref()
	defer generated.FilDestroyZeroSignatureResponse(resp)

	// Copy the signature into Go memory before the deferred destroy frees it.
	var sig Signature
	copy(sig[:], resp.Signature.Inner[:])
	return sig
}
package ffi
import (
"fmt"
"math/rand"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDeterministicPrivateKeyGeneration checks that seeded key generation
// is deterministic: the same 32-byte seed always yields the same key.
func TestDeterministicPrivateKeyGeneration(t *testing.T) {
	rand.Seed(time.Now().UnixNano())
	for i := 0; i < 10000; i++ {
		// Fresh random seed each round; only equality of the two derived
		// keys matters, not the seed value itself.
		var xs [32]byte
		n, err := rand.Read(xs[:])
		require.NoError(t, err)
		require.Equal(t, len(xs), n)

		first := PrivateKeyGenerateWithSeed(xs)
		secnd := PrivateKeyGenerateWithSeed(xs)

		assert.Equal(t, first, secnd)
	}
}
// TestBLSSigningAndVerification exercises the full sign/verify round trip:
// positive cases, mismatched key/message negatives, and aggregate checks.
func TestBLSSigningAndVerification(t *testing.T) {
	// generate private keys
	fooPrivateKey := PrivateKeyGenerate()
	barPrivateKey := PrivateKeyGenerate()

	// get the public keys for the private keys
	fooPublicKey := PrivateKeyPublicKey(fooPrivateKey)
	barPublicKey := PrivateKeyPublicKey(barPrivateKey)

	// make messages to sign with the keys
	fooMessage := Message("hello foo")
	barMessage := Message("hello bar!")

	// calculate the digests of the messages
	fooDigest := Hash(fooMessage)
	barDigest := Hash(barMessage)

	// get the signature when signing the messages with the private keys
	fooSignature := PrivateKeySign(fooPrivateKey, fooMessage)
	barSignature := PrivateKeySign(barPrivateKey, barMessage)

	// get the aggregateSign
	aggregateSign := Aggregate([]Signature{*fooSignature, *barSignature})

	// assert the foo message was signed with the foo key
	assert.True(t, Verify(fooSignature, []Digest{fooDigest}, []PublicKey{fooPublicKey}))

	// assert the bar message was signed with the bar key
	assert.True(t, Verify(barSignature, []Digest{barDigest}, []PublicKey{barPublicKey}))

	// assert the foo message was signed with the foo key
	assert.True(t, HashVerify(fooSignature, []Message{fooMessage}, []PublicKey{fooPublicKey}))

	// assert the bar message was signed with the bar key
	assert.True(t, HashVerify(barSignature, []Message{barMessage}, []PublicKey{barPublicKey}))

	// assert the foo message was not signed by the bar key
	assert.False(t, Verify(fooSignature, []Digest{fooDigest}, []PublicKey{barPublicKey}))

	// assert the bar/foo message was not signed by the foo/bar key
	assert.False(t, Verify(barSignature, []Digest{barDigest}, []PublicKey{fooPublicKey}))
	assert.False(t, Verify(barSignature, []Digest{fooDigest}, []PublicKey{barPublicKey}))
	assert.False(t, Verify(fooSignature, []Digest{barDigest}, []PublicKey{fooPublicKey}))

	// assert the foo and bar message was signed with the foo and bar key
	assert.True(t, HashVerify(aggregateSign, []Message{fooMessage, barMessage}, []PublicKey{fooPublicKey, barPublicKey}))

	// assert the bar and foo message was not signed by the foo and bar key
	assert.False(t, HashVerify(aggregateSign, []Message{fooMessage, barMessage}, []PublicKey{fooPublicKey}))
}
// BenchmarkBLSVerify measures single-signature verification; key setup and
// signing happen once, outside the timed loop.
func BenchmarkBLSVerify(b *testing.B) {
	priv := PrivateKeyGenerate()

	msg := Message("this is a message that i will be signing")
	digest := Hash(msg)

	sig := PrivateKeySign(priv, msg)
	// fmt.Println("SIG SIZE: ", len(sig))
	// fmt.Println("SIG: ", sig)

	pubk := PrivateKeyPublicKey(priv)

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		if !Verify(sig, []Digest{digest}, []PublicKey{pubk}) {
			b.Fatal("failed to verify")
		}
	}
}
// TestBlsAggregateErrors checks that Aggregate returns nil (not a panic or
// a bogus signature) for both empty and nil input slices.
func TestBlsAggregateErrors(t *testing.T) {
	t.Run("no signatures", func(t *testing.T) {
		var empty []Signature
		out := Aggregate(empty)
		require.Nil(t, out)
	})

	t.Run("nil signatures", func(t *testing.T) {
		out := Aggregate(nil)
		require.Nil(t, out)
	})
}
// BenchmarkBLSVerifyBatch measures aggregated verification across a range
// of batch sizes.
func BenchmarkBLSVerifyBatch(b *testing.B) {
	b.Run("10", benchmarkBLSVerifyBatchSize(10))
	b.Run("50", benchmarkBLSVerifyBatchSize(50))
	b.Run("100", benchmarkBLSVerifyBatchSize(100))
	b.Run("300", benchmarkBLSVerifyBatchSize(300))
	b.Run("1000", benchmarkBLSVerifyBatchSize(1000))
	b.Run("4000", benchmarkBLSVerifyBatchSize(4000))
}
// benchmarkBLSVerifyBatchSize returns a benchmark that verifies one
// aggregated signature over `size` distinct message/key pairs.
func benchmarkBLSVerifyBatchSize(size int) func(b *testing.B) {
	return func(b *testing.B) {
		var digests []Digest
		var msgs []Message
		var sigs []Signature
		var pubks []PublicKey
		for i := 0; i < size; i++ {
			msg := Message(fmt.Sprintf("cats cats cats cats %d %d %d dogs", i, i, i))
			msgs = append(msgs, msg)
			digests = append(digests, Hash(msg))
			priv := PrivateKeyGenerate()
			sig := PrivateKeySign(priv, msg)
			sigs = append(sigs, *sig)
			pubk := PrivateKeyPublicKey(priv)
			pubks = append(pubks, pubk)
		}

		// Aggregation is done (and reported) outside the timed region;
		// only Verify is measured.
		t := time.Now()
		agsig := Aggregate(sigs)
		fmt.Println("Aggregate took: ", time.Since(t))

		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			if !Verify(agsig, digests, pubks) {
				b.Fatal("failed to verify")
			}
		}
	}
}
// BenchmarkBLSHashAndVerify measures Hash plus Verify together — hashing
// happens inside the timed loop, unlike BenchmarkBLSVerify.
func BenchmarkBLSHashAndVerify(b *testing.B) {
	priv := PrivateKeyGenerate()

	msg := Message("this is a message that i will be signing")

	sig := PrivateKeySign(priv, msg)
	// fmt.Println("SIG SIZE: ", len(sig))
	// fmt.Println("SIG: ", sig)

	pubk := PrivateKeyPublicKey(priv)

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		digest := Hash(msg)
		if !Verify(sig, []Digest{digest}, []PublicKey{pubk}) {
			b.Fatal("failed to verify")
		}
	}
}
// BenchmarkBLSHashVerify measures the combined HashVerify call, which hashes
// and verifies on the native side in one step.
func BenchmarkBLSHashVerify(b *testing.B) {
	priv := PrivateKeyGenerate()

	msg := Message("this is a message that i will be signing")

	sig := PrivateKeySign(priv, msg)
	// fmt.Println("SIG SIZE: ", len(sig))
	// fmt.Println("SIG: ", sig)

	pubk := PrivateKeyPublicKey(priv)

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		if !HashVerify(sig, []Message{msg}, []PublicKey{pubk}) {
			b.Fatal("failed to verify")
		}
	}
}
#!/bin/bash
# Regenerate the C header and Go CGO bindings after a proofs-api bump,
# then rebuild from source. Aborts on the first failing command.
set -e

make clean

cd rust
# Drop the lockfile so the proofs API dependency is re-resolved below.
rm -f Cargo.lock
cargo update -p "filecoin-proofs-api"
cargo install cbindgen
# Regenerate the C header that the CGO bindings are generated from.
cbindgen --clean --config cbindgen.toml --crate filcrypto --output ../include/filcrypto.h
cd ..

FFI_BUILD_FROM_SOURCE=1 FFI_USE_BLST=1 make
# Regenerate the Go bindings from the fresh header.
make cgo-gen
go mod tidy
//+build cgo
package main
import (
"fmt"
"os"
ffi "github.com/filecoin-project/filecoin-ffi"
)
// main runs the FFI workflow smoke tests; any failed assertion panics via
// panicOnFailureTestHelper.
func main() {
	// Surface native-side logging from the proofs library.
	os.Setenv("RUST_LOG", "info")

	th := panicOnFailureTestHelper{}

	ffi.WorkflowGetGPUDevicesDoesNotProduceAnError(&th)
	ffi.WorkflowProofsLifecycle(&th)
	ffi.WorkflowRegisteredPoStProofFunctions(&th)
	ffi.WorkflowRegisteredSealProofFunctions(&th)
}
// panicOnFailureTestHelper satisfies the ffi workflow test-helper interface
// by panicking immediately on any failed check.
type panicOnFailureTestHelper struct{}

// AssertEqual panics unless expected == actual; otherwise returns true.
func (p panicOnFailureTestHelper) AssertEqual(expected, actual interface{}, msgAndArgs ...interface{}) bool {
	if expected != actual {
		panic(fmt.Sprintf("not equal: %+v, %+v, %+v", expected, actual, msgAndArgs))
	}
	return true
}

// AssertNoError panics if err is non-nil; otherwise returns true.
func (p panicOnFailureTestHelper) AssertNoError(err error, msgAndArgs ...interface{}) bool {
	if err != nil {
		panic(fmt.Sprintf("there was an error: %+v, %+v", err, msgAndArgs))
	}
	return true
}

// AssertTrue panics if value is false; otherwise returns true.
func (p panicOnFailureTestHelper) AssertTrue(value bool, msgAndArgs ...interface{}) bool {
	if !value {
		panic(fmt.Sprintf("not true: %+v, %+v", value, msgAndArgs))
	}
	return true
}

// RequireEqual is AssertEqual without a return value.
func (p panicOnFailureTestHelper) RequireEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	p.AssertEqual(expected, actual, msgAndArgs...)
}

// RequireNoError is AssertNoError without a return value.
func (p panicOnFailureTestHelper) RequireNoError(err error, msgAndArgs ...interface{}) {
	p.AssertNoError(err, msgAndArgs...)
}

// RequireTrue is AssertTrue without a return value.
func (p panicOnFailureTestHelper) RequireTrue(value bool, msgAndArgs ...interface{}) {
	p.AssertTrue(value, msgAndArgs...)
}
//+build cgo
package ffi
import (
"fil_integrate/build/state-types/abi"
"github.com/filecoin-project/filecoin-ffi/generated"
"github.com/pkg/errors"
"fil_integrate/build/proof"
)
// FallbackChallenges groups the PoSt challenges generated for a set of
// sectors: Sectors preserves generation order, and Challenges maps each
// sector number to its challenge indexes.
type FallbackChallenges struct {
	Sectors    []abi.SectorNumber
	Challenges map[abi.SectorNumber][]uint64
}

// VanillaProof holds the raw bytes of one vanilla PoSt proof.
type VanillaProof []byte
// GeneratePoStFallbackSectorChallenges generates the per-sector fallback
// PoSt challenges for the given proof type, miner, randomness, and sectors.
func GeneratePoStFallbackSectorChallenges(
	proofType abi.RegisteredPoStProof,
	minerID abi.ActorID,
	randomness abi.PoStRandomness,
	sectorIds []abi.SectorNumber,
) (*FallbackChallenges, error) {
	proverID, err := toProverID(minerID)
	if err != nil {
		return nil, err
	}
	pp, err := toFilRegisteredPoStProof(proofType)
	if err != nil {
		return nil, err
	}

	// The FFI layer takes plain uint64 sector ids.
	secIds := make([]uint64, len(sectorIds))
	for i, sid := range sectorIds {
		secIds[i] = uint64(sid)
	}

	resp := generated.FilGenerateFallbackSectorChallenges(
		pp, to32ByteArray(randomness), secIds, uint(len(secIds)),
		proverID,
	)
	resp.Deref()
	// Re-slice the C-backed arrays to their reported lengths.
	// NOTE(review): this runs before the status check — presumably the
	// lengths are zero on error; confirm against the generated bindings.
	resp.IdsPtr = resp.IdsPtr[:resp.IdsLen]
	resp.ChallengesPtr = resp.ChallengesPtr[:resp.ChallengesLen]

	defer generated.FilDestroyGenerateFallbackSectorChallengesResponse(resp)

	if resp.StatusCode != generated.FCPResponseStatusFCPNoError {
		return nil, errors.New(generated.RawString(resp.ErrorMsg).Copy())
	}

	// copy from C memory space to Go
	var out FallbackChallenges
	out.Sectors = make([]abi.SectorNumber, resp.IdsLen)
	out.Challenges = make(map[abi.SectorNumber][]uint64)
	stride := int(resp.ChallengesStride)
	for idx := range resp.IdsPtr {
		secNum := abi.SectorNumber(resp.IdsPtr[idx])
		out.Sectors[idx] = secNum
		// Each sector's challenges occupy one stride-sized window of the
		// flat challenge array.
		out.Challenges[secNum] = append([]uint64{}, resp.ChallengesPtr[idx*stride:(idx+1)*stride]...)
	}

	return &out, nil
}
// GenerateSingleVanillaProof produces the vanilla proof bytes for a single
// replica at the given challenge indexes.
func GenerateSingleVanillaProof(
	replica PrivateSectorInfo,
	challange []uint64,
) ([]byte, error) {
	// toFilPrivateReplicaInfo allocates C-side data; free releases it.
	rep, free, err := toFilPrivateReplicaInfo(replica)
	if err != nil {
		return nil, err
	}
	defer free()

	resp := generated.FilGenerateSingleVanillaProof(rep, challange, uint(len(challange)))
	resp.Deref()
	defer generated.FilDestroyGenerateSingleVanillaProofResponse(resp)

	if resp.StatusCode != generated.FCPResponseStatusFCPNoError {
		return nil, errors.New(generated.RawString(resp.ErrorMsg).Copy())
	}

	// Copy the proof into Go memory before the deferred destroy frees it.
	resp.VanillaProof.Deref()
	return copyBytes(resp.VanillaProof.ProofPtr, resp.VanillaProof.ProofLen), nil
}
// GenerateWinningPoStWithVanilla assembles a winning PoSt from
// already-generated vanilla proofs.
func GenerateWinningPoStWithVanilla(
	proofType abi.RegisteredPoStProof,
	minerID abi.ActorID,
	randomness abi.PoStRandomness,
	proofs [][]byte,
) ([]proof.PoStProof, error) {
	pp, err := toFilRegisteredPoStProof(proofType)
	if err != nil {
		return nil, err
	}

	proverID, err := toProverID(minerID)
	if err != nil {
		return nil, err
	}
	// toVanillaProofs allocates C-side copies; discard releases them.
	fproofs, discard := toVanillaProofs(proofs)
	defer discard()

	resp := generated.FilGenerateWinningPostWithVanilla(
		pp,
		to32ByteArray(randomness),
		proverID,
		fproofs, uint(len(proofs)),
	)
	resp.Deref()
	// First Deref fills ProofsLen; allocate Go-side storage for the proof
	// array and Deref again so its contents are copied in.
	resp.ProofsPtr = make([]generated.FilPoStProof, resp.ProofsLen)
	resp.Deref()

	defer generated.FilDestroyGenerateWinningPostResponse(resp)

	if resp.StatusCode != generated.FCPResponseStatusFCPNoError {
		return nil, errors.New(generated.RawString(resp.ErrorMsg).Copy())
	}

	out, err := fromFilPoStProofs(resp.ProofsPtr)
	if err != nil {
		return nil, err
	}

	return out, nil
}
// GenerateWindowPoStWithVanilla assembles a single window PoSt proof from
// already-generated vanilla proofs.
func GenerateWindowPoStWithVanilla(
	proofType abi.RegisteredPoStProof,
	minerID abi.ActorID,
	randomness abi.PoStRandomness,
	proofs [][]byte,
) (proof.PoStProof, error) {
	pp, err := toFilRegisteredPoStProof(proofType)
	if err != nil {
		return proof.PoStProof{}, err
	}

	proverID, err := toProverID(minerID)
	if err != nil {
		return proof.PoStProof{}, err
	}
	// toVanillaProofs allocates C-side copies; discard releases them.
	fproofs, discard := toVanillaProofs(proofs)
	defer discard()

	resp := generated.FilGenerateWindowPostWithVanilla(
		pp,
		to32ByteArray(randomness),
		proverID,
		fproofs, uint(len(proofs)),
	)
	resp.Deref()
	defer generated.FilDestroyGenerateWindowPostResponse(resp)

	if resp.StatusCode != generated.FCPResponseStatusFCPNoError {
		return proof.PoStProof{}, errors.New(generated.RawString(resp.ErrorMsg).Copy())
	}

	out, err := fromFilPoStProof(resp.Proof)
	if err != nil {
		return proof.PoStProof{}, err
	}

	return out, nil
}
---
GENERATOR:
PackageName: generated
PackageDescription:
PackageLicense:
Options:
SafeStrings: true
Includes:
- ../filcrypto.h
FlagGroups:
- {name: LDFLAGS, flags: ["-L${SRCDIR}/.."]}
- {name: pkg-config, flags: ["${SRCDIR}/../filcrypto.pc"]}
PARSER:
Defines:
IncludePaths:
- ./headerstubs/
SourcesPaths:
- ./filcrypto.h
TRANSLATOR:
ConstCharIsString: true
ConstUCharIsString: false
ConstRules:
defines: expand
enum: cgo
PtrTips:
function:
- {target: "^fil_destroy", tips: [ref]}
Rules:
global:
- {action: accept, from: "^fil"}
- {action: accept, from: "^FCPResponseStatus"}
- {transform: export}
private:
- {transform: unexport}
post-global:
- {transform: export}
- {load: snakecase}
This diff is collapsed.
// WARNING: This file has automatically been generated
// Code generated by https://git.io/c-for-go. DO NOT EDIT.
#include "../filcrypto.h"
#include <stdlib.h>
#pragma once
#define __CGOGEN 1
// WARNING: This file has automatically been generated
// Code generated by https://git.io/c-for-go. DO NOT EDIT.
package generated
/*
#cgo LDFLAGS: -L${SRCDIR}/..
#cgo pkg-config: ${SRCDIR}/../filcrypto.pc
#include "../filcrypto.h"
#include <stdlib.h>
#include "cgo_helpers.h"
*/
import "C"
// FCPResponseStatus as declared in filecoin-ffi/filcrypto.h:31
type FCPResponseStatus int32
// FCPResponseStatus enumeration from filecoin-ffi/filcrypto.h:31
const (
FCPResponseStatusFCPNoError FCPResponseStatus = C.FCPResponseStatus_FCPNoError
FCPResponseStatusFCPUnclassifiedError FCPResponseStatus = C.FCPResponseStatus_FCPUnclassifiedError
FCPResponseStatusFCPCallerError FCPResponseStatus = C.FCPResponseStatus_FCPCallerError
FCPResponseStatusFCPReceiverError FCPResponseStatus = C.FCPResponseStatus_FCPReceiverError
)
// FilRegisteredAggregationProof as declared in filecoin-ffi/filcrypto.h:35
type FilRegisteredAggregationProof int32
// FilRegisteredAggregationProof enumeration from filecoin-ffi/filcrypto.h:35
const (
FilRegisteredAggregationProofSnarkPackV1 FilRegisteredAggregationProof = C.fil_RegisteredAggregationProof_SnarkPackV1
)
// FilRegisteredPoStProof as declared in filecoin-ffi/filcrypto.h:48
type FilRegisteredPoStProof int32
// FilRegisteredPoStProof enumeration from filecoin-ffi/filcrypto.h:48
const (
FilRegisteredPoStProofStackedDrgWinning2KiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning2KiBV1
FilRegisteredPoStProofStackedDrgWinning8MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning8MiBV1
FilRegisteredPoStProofStackedDrgWinning16MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning16MiBV1
FilRegisteredPoStProofStackedDrgWinning32MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning32MiBV1
FilRegisteredPoStProofStackedDrgWinning64MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning64MiBV1
FilRegisteredPoStProofStackedDrgWinning128MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning128MiBV1
FilRegisteredPoStProofStackedDrgWinning256MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning256MiBV1
FilRegisteredPoStProofStackedDrgWinning512MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning512MiBV1
FilRegisteredPoStProofStackedDrgWinning32GiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning32GiBV1
FilRegisteredPoStProofStackedDrgWinning64GiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWinning64GiBV1
FilRegisteredPoStProofStackedDrgWindow2KiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow2KiBV1
FilRegisteredPoStProofStackedDrgWindow8MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow8MiBV1
FilRegisteredPoStProofStackedDrgWindow16MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow16MiBV1
FilRegisteredPoStProofStackedDrgWindow32MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow32MiBV1
FilRegisteredPoStProofStackedDrgWindow64MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow64MiBV1
FilRegisteredPoStProofStackedDrgWindow128MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow128MiBV1
FilRegisteredPoStProofStackedDrgWindow256MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow256MiBV1
FilRegisteredPoStProofStackedDrgWindow512MiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow512MiBV1
FilRegisteredPoStProofStackedDrgWindow32GiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow32GiBV1
FilRegisteredPoStProofStackedDrgWindow64GiBV1 FilRegisteredPoStProof = C.fil_RegisteredPoStProof_StackedDrgWindow64GiBV1
)
// FilRegisteredSealProof as declared in filecoin-ffi/filcrypto.h:61
//
// Mirrors the C enum fil_RegisteredSealProof so seal-proof identifiers can be
// passed across the cgo boundary without conversion.
type FilRegisteredSealProof int32
// FilRegisteredSealProof enumeration from filecoin-ffi/filcrypto.h:61
const (
// V1 seal proofs, one per supported sector size (2KiB..64GiB).
FilRegisteredSealProofStackedDrg2KiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg2KiBV1
FilRegisteredSealProofStackedDrg8MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg8MiBV1
FilRegisteredSealProofStackedDrg16MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg16MiBV1
FilRegisteredSealProofStackedDrg32MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg32MiBV1
FilRegisteredSealProofStackedDrg64MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg64MiBV1
FilRegisteredSealProofStackedDrg128MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg128MiBV1
FilRegisteredSealProofStackedDrg256MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg256MiBV1
FilRegisteredSealProofStackedDrg512MiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg512MiBV1
FilRegisteredSealProofStackedDrg32GiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg32GiBV1
FilRegisteredSealProofStackedDrg64GiBV1 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg64GiBV1
// V1_1 variants of the same proofs (suffix "V11" maps to the C "_1" names).
FilRegisteredSealProofStackedDrg2KiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg2KiBV1_1
FilRegisteredSealProofStackedDrg8MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg8MiBV1_1
FilRegisteredSealProofStackedDrg16MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg16MiBV1_1
FilRegisteredSealProofStackedDrg32MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg32MiBV1_1
FilRegisteredSealProofStackedDrg64MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg64MiBV1_1
FilRegisteredSealProofStackedDrg128MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg128MiBV1_1
FilRegisteredSealProofStackedDrg256MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg256MiBV1_1
FilRegisteredSealProofStackedDrg512MiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg512MiBV1_1
FilRegisteredSealProofStackedDrg32GiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg32GiBV1_1
FilRegisteredSealProofStackedDrg64GiBV11 FilRegisteredSealProof = C.fil_RegisteredSealProof_StackedDrg64GiBV1_1
)
This diff is collapsed.
package generated
/*
#cgo LDFLAGS: -L${SRCDIR}/.. -lfilcrypto
#cgo pkg-config: ${SRCDIR}/../filcrypto.pc
#include "../filcrypto.h"
#include <stdlib.h>
#include "cgo_helpers.h"
*/
import "C"
This diff is collapsed.
module github.com/filecoin-project/filecoin-ffi
go 1.13
require (
github.com/filecoin-project/go-fil-commcid v0.1.0
github.com/ipfs/go-cid v0.0.7
github.com/pkg/errors v0.9.1
github.com/stretchr/testify v1.7.0
github.com/xlab/c-for-go v0.0.0-20201112171043-ea6dce5809cb
golang.org/x/tools v0.0.0-20201112185108-eeaa07dd7696 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
modernc.org/golex v1.0.1 // indirect
)
This diff is collapsed.
/* Minimal fixed-width typedefs fed to the binding generator so it can parse
 * filcrypto.h without the full system headers.
 * NOTE(review): size_t as `unsigned long int` assumes an LP64 target — confirm. */
typedef unsigned char uint8_t;
typedef unsigned long long uint64_t;
typedef unsigned long int size_t;
#define bool _Bool
#!/usr/bin/env bash
# shellcheck disable=SC2155 enable=require-variable-braces
# install-filcrypto: fetch a prebuilt libfilcrypto release or build it from
# the local Rust sources; see main() below.
set -Exeo pipefail
# optional GitHub API auth header (avoids rate limiting when GITHUB_TOKEN is set)
auth_header=()
if [ -n "${GITHUB_TOKEN}" ]; then
auth_header=("-H" "Authorization: token ${GITHUB_TOKEN}")
fi
# set CWD to the root of filecoin-ffi
#
cd "$(dirname "${BASH_SOURCE[0]}")"
# tracks where the Rust sources are were we to build locally instead of
# downloading from GitHub Releases
#
rust_sources_dir="rust"
# an array of values passed as 'target-feature' to the Rust compiler if we're
# building an optimized libfilcrypto (which takes advantage of some perf-boosting
# instruction sets)
#
#optimized_release_rustc_target_features=$(jq -r '.[].rustc_target_feature' < "${rust_sources_dir}/rustc-target-features-optimized.json")
# each value in this area is checked against the "features" of the hosts CPU
# in order to determine if the host is suitable for an optimized release
#
cpu_features_required_for_optimized_release=$(jq -r '.[].check_cpu_for_feature | select(. != null)' < "${rust_sources_dir}/rustc-target-features-optimized.json")
# main installs libfilcrypto (static lib, header, pkg-config file) into the
# repo root: it first tries a prebuilt GitHub release matching HEAD, and falls
# back to building from the local Rust sources. FFI_BUILD_FROM_SOURCE=1 forces
# the source build.
main() {
local __release_flags=$(get_release_flags)
if [ "${FFI_BUILD_FROM_SOURCE}" != "1" ] && download_release_tarball __tarball_path "${rust_sources_dir}" "filecoin-ffi" "${__release_flags}"; then
local __tmp_dir=$(mktemp -d)
# silence shellcheck warning as the assignment happened in
# `download_release_tarball()`
# shellcheck disable=SC2154
# extract downloaded tarball to temporary directory
#
tar -C "${__tmp_dir}" -xzf "${__tarball_path}"
# copy build assets into root of filecoin-ffi
#
find -L "${__tmp_dir}" -type f -name filcrypto.h -exec cp -- "{}" . \;
find -L "${__tmp_dir}" -type f -name libfilcrypto.a -exec cp -- "{}" . \;
find -L "${__tmp_dir}" -type f -name filcrypto.pc -exec cp -- "{}" . \;
check_installed_files
(>&2 echo "[install-filcrypto/main] successfully installed prebuilt libfilcrypto")
else
(>&2 echo "[install-filcrypto/main] building libfilcrypto from local sources (dir = ${rust_sources_dir})")
# build libfilcrypto (and corresponding header and pkg-config)
#
build_from_source "filcrypto" "${rust_sources_dir}" "${__release_flags}"
# copy from Rust's build directory (target) to root of filecoin-ffi
#
find -L "${rust_sources_dir}/target/release" -type f -name filcrypto.h -exec cp -- "{}" . \;
find -L "${rust_sources_dir}/target/release" -type f -name libfilcrypto.a -exec cp -- "{}" . \;
find -L "${rust_sources_dir}" -type f -name filcrypto.pc -exec cp -- "{}" . \;
check_installed_files
(>&2 echo "[install-filcrypto/main] successfully built and installed libfilcrypto from source")
fi
}
# download_release_tarball downloads the release tarball for the current HEAD
# commit (tag = first 16 chars of the sha) from GitHub Releases into /tmp and
# returns its path through the variable named by $1. Returns non-zero if no
# matching release asset exists or the download fails.
#
# $1: name of the variable to receive the tarball path
# $2: path to the Rust sources (currently unused here — TODO confirm it can go)
# $3: GitHub repo name under filecoin-project/
# $4: release flags (currently unused here; release flavor comes from FFI_USE_BLST)
download_release_tarball() {
local __resultvar=$1
local __rust_sources_path=$2
local __repo_name=$3
local __release_flags=$4
local __release_sha1=$(git rev-parse HEAD)
local __release_tag="${__release_sha1:0:16}"
local __release_tag_url="https://api.github.com/repos/filecoin-project/${__repo_name}/releases/tags/${__release_tag}"
# Download either the non-optimized standard or standard-blst release.
if [ "${FFI_USE_BLST}" == "1" ]; then
release_flag_name="standard-blst"
else
release_flag_name="standard-pairing"
fi
# TODO: This function shouldn't make assumptions about how these releases'
# names are constructed. Marginally less-bad would be to require that this
# function's caller provide the release name.
#
local __release_name="${__repo_name}-$(uname)-${release_flag_name}"
(>&2 echo "[download_release_tarball] acquiring release @ ${__release_tag}")
local __release_response=$(curl "${auth_header[@]}" \
--retry 3 \
--location "${__release_tag_url}")
# pick the asset whose name contains the expected release name
local __release_url=$(echo "${__release_response}" | jq -r ".assets[] | select(.name | contains(\"${__release_name}\")) | .url")
local __tar_path="/tmp/${__release_name}_$(basename "${__release_url}").tar.gz"
if [[ -z "${__release_url}" ]]; then
(>&2 echo "[download_release_tarball] failed to download release (tag URL: ${__release_tag_url}, response: ${__release_response})")
return 1
fi
# resolve the API asset URL to the effective download URL without fetching the body
local __asset_url=$(curl "${auth_header[@]}" \
--head \
--retry 3 \
--header "Accept:application/octet-stream" \
--location \
--output /dev/null \
-w "%{url_effective}" \
"${__release_url}")
if ! curl --retry 3 --output "${__tar_path}" "${__asset_url}"; then
(>&2 echo "[download_release_tarball] failed to download release asset (tag URL: ${__release_tag_url}, asset URL: ${__asset_url})")
return 1
fi
# set $__resultvar (which the caller provided as $1), which is the poor
# man's way of returning a value from a function in Bash
#
eval "${__resultvar}='${__tar_path}'"
}
# build_from_source compiles libfilcrypto from the local Rust sources via
# scripts/build-release.sh, choosing cargo feature flags from the FFI_USE_*
# environment variables (BLST/BLST_PORTABLE vs pairing, GPU, multicore SDR).
#
# $1: library name to build (e.g. "filcrypto")
# $2: path to the Rust sources
# $3: optional rustc target-feature list for an optimized build
build_from_source() {
local __library_name=$1
local __rust_sources_path=$2
local __release_flags=$3
local __repo_sha1=$(git rev-parse HEAD)
local __repo_sha1_truncated="${__repo_sha1:0:16}"
(>&2 echo "building from source @ ${__repo_sha1_truncated}")
# a Rust toolchain (cargo + rustup) is required for a source build
if ! [ -x "$(command -v cargo)" ]; then
(>&2 echo '[build_from_source] Error: cargo is not installed.')
(>&2 echo '[build_from_source] install Rust toolchain to resolve this problem.')
exit 1
fi
if ! [ -x "$(command -v rustup)" ]; then
(>&2 echo '[build_from_source] Error: rustup is not installed.')
(>&2 echo '[build_from_source] install Rust toolchain installer to resolve this problem.')
exit 1
fi
pushd "${__rust_sources_path}"
cargo --version
# Default to use gpu flags, unless specified to disable
gpu_flags=",gpu"
if [ "${FFI_USE_GPU}" == "0" ]; then
gpu_flags=""
fi
# Default to use multicore_sdr flags, unless specified to disable
use_multicore_sdr="--features multicore-sdr"
if [ "${FFI_USE_MULTICORE_SDR}" == "0" ]; then
use_multicore_sdr=""
fi
# Add feature specific rust flags as needed here.
if [ "${FFI_USE_BLST_PORTABLE}" == "1" ]; then
additional_flags="--no-default-features ${use_multicore_sdr} --features blst --features blst-portable${gpu_flags}"
elif [ "${FFI_USE_BLST}" == "1" ]; then
additional_flags="--no-default-features ${use_multicore_sdr} --features blst${gpu_flags}"
else
additional_flags="--no-default-features ${use_multicore_sdr} --features pairing${gpu_flags}"
fi
# only set RUSTFLAGS when target features were detected, to keep the default build portable
if [ -n "${__release_flags}" ]; then
RUSTFLAGS="-C target-feature=${__release_flags}" bash ./scripts/build-release.sh "${__library_name}" "$(cat rust-toolchain)" "${additional_flags}"
else
bash ./scripts/build-release.sh "${__library_name}" "$(cat rust-toolchain)" "${additional_flags}"
fi
popd
}
# get_release_flags inspects the host CPU (via /proc/cpuinfo, or sysctl on
# Darwin) and echoes a comma-separated rustc target-feature list (e.g.
# "+adx,+sha") containing only the features this CPU actually supports.
# Echoes the empty string when none of the required features are present.
get_release_flags() {
local __features=""
# determine where to look for CPU features
#
if [[ ! -f "/proc/cpuinfo" ]]; then
(>&2 echo "[get_release_flags] no /proc/cpuinfo file; falling back to Darwin feature detection")
__features=$(sysctl -a | grep machdep.cpu | tr '[:upper:]' '[:lower:]' | grep features)
else
#aarch64_uname=$(uname -a | grep aarch64)
x86_64_uname=$(uname -a | grep x86_64)
# shellcheck disable=SC2002
if [ -n "${x86_64_uname}" ]; then
__features=$(cat /proc/cpuinfo | grep flags | head -n 1)
else
# For now we assume aarch64. If another supported platform is added, explicitly check for it
__features=$(cat /proc/cpuinfo | grep Features | head -n 1)
fi
fi
# Maps cpu flag to rust flags (related to entries in rust/rustc-target-features-optimized.json)
feature_map=("adx:+adx" "sha_ni:+sha" "sha2:+sha2" "sse2:+sse2" "avx2:+avx2" "avx:+avx" "sse4_2:+sse4.2" "sse4_1:+sse4.1")
target_features=""
# check for the presence of each required CPU feature
#
# shellcheck disable=SC2068 # the splitting is intentional
for x in ${cpu_features_required_for_optimized_release[@]}; do
current_feature=$(echo "${__features}" | grep -c "${x}")
if [ "1" = "${current_feature}" ]; then
# translate the cpuinfo flag name into its rustc target-feature name
for feature in "${feature_map[@]}"; do
key=${feature%%:*}
if [ "${key}" == "${x}" ]; then
val=${feature#*:}
if [ -z "${target_features}" ]; then
target_features="${val}"
else
target_features="${target_features},${val}"
fi
fi
done
fi
done
echo "${target_features}"
}
# check_installed_files verifies that all three build artifacts
# (filcrypto.h, libfilcrypto.a, filcrypto.pc) were copied into the
# current directory, exiting 1 on the first one that is missing.
check_installed_files() {
    local artifact
    for artifact in filcrypto.h libfilcrypto.a filcrypto.pc; do
        if [[ ! -f "./${artifact}" ]]; then
            (>&2 echo "[check_installed_files] failed to install ${artifact}")
            exit 1
        fi
    done
}
# Entrypoint: run main with the script's arguments, then exit with its status.
main "$@"; exit
#!/bin/zsh
# Note: This script is a modified version of the mkreleaselog script used by
# the go-ipfs team.
#
# Usage: ./mkreleaselog v0.25.0 v0.26.0 > /tmp/release.log
set -euo pipefail
export GO111MODULE=on
export GOPATH="$(go env GOPATH)"
# keep jq output unbuffered so long pipelines stream incrementally
alias jq="jq --unbuffered"
# Go module-path suffixes that must be stripped to recover the on-disk repo path.
REPO_SUFFIXES_TO_STRIP=(
"/v2"
"/v3"
"/v4"
"/v5"
"/v6"
)
AUTHORS=(
# orgs
filecoin-project
# Authors of personal repos used by filecoin-ffi that should be mentioned in the
# release notes.
xlab
)
# REPO_FILTER limits dependency changelogs to repos owned by AUTHORS;
# IGNORED_FILES skips commits that only touch packaging/CI files.
[[ -n "${REPO_FILTER+x}" ]] || REPO_FILTER="github.com/(${$(printf "|%s" "${AUTHORS[@]}"):1})"
[[ -n "${IGNORED_FILES+x}" ]] || IGNORED_FILES='^\(\.gx\|package\.json\|\.travis\.yml\|go.mod\|go\.sum|\.github|\.circleci\)$'
NL=$'\n'
# msg prints its arguments to stderr (keeps stdout clean for the changelog).
msg() {
echo "$*" >&2
}
# statlog parses `git log --shortstat` for repo $1 between refs $2 and $3
# (default HEAD) and emits one JSON object per non-merge commit:
# {Commit, Author, Email, Files, Insertions, Deletions}.
statlog() {
rpath="$GOPATH/src/$1"
# strip /vN module suffixes to find the repo on disk
for s in $REPO_SUFFIXES_TO_STRIP; do
rpath=${rpath%$s}
done
start="${2:-}"
end="${3:-HEAD}"
# the pretty format emits 3 lines per commit; --shortstat appends a blank
# line plus a "N files changed, ..." summary line, consumed by the reads below
git -C "$rpath" log --shortstat --no-merges --pretty="tformat:%H%n%aN%n%aE" "$start..$end" | while
read hash
read name
read email
read _ # empty line
read changes
do
changed=0
insertions=0
deletions=0
# split the summary on commas and classify each "<count> <event>" pair
while read count event; do
if [[ "$event" =~ ^file ]]; then
changed=$count
elif [[ "$event" =~ ^insertion ]]; then
insertions=$count
elif [[ "$event" =~ ^deletion ]]; then
deletions=$count
else
echo "unknown event $event" >&2
exit 1
fi
done<<<"${changes//,/$NL}"
jq -n \
--arg "hash" "$hash" \
--arg "name" "$name" \
--arg "email" "$email" \
--argjson "changed" "$changed" \
--argjson "insertions" "$insertions" \
--argjson "deletions" "$deletions" \
'{Commit: $hash, Author: $name, Email: $email, Files: $changed, Insertions: $insertions, Deletions: $deletions}'
done
}
# Returns a stream of deps changed between $1 and $2.
# $1/$2 are JSON files of module lists (as produced by mod_deps | jq -s);
# emits {Path, Old, New} objects for modules whose Version differs.
dep_changes() {
{
<"$1"
<"$2"
} | jq -s 'JOIN(INDEX(.[0][]; .Path); .[1][]; .Path; {Path: .[0].Path, Old: (.[1] | del(.Path)), New: (.[0] | del(.Path))}) | select(.New.Version != .Old.Version)'
}
# resolve_commits resolves a git ref for each version.
# Adds a Ref field to each module object: the commit sha for pseudo-versions,
# the bare tag for +incompatible versions, or the version tag itself.
resolve_commits() {
jq '. + {Ref: (.Version|capture("^((?<ref1>.*)\\+incompatible|v.*-(0\\.)?[0-9]{14}-(?<ref2>[a-f0-9]{12})|(?<ref3>v.*))$") | .ref1 // .ref2 // .ref3)}'
}
# pr_link renders a markdown link "[owner/name#NUM](https://repo/pull/NUM)"
# for pull request $2 of repo $1 (a github.com/... path).
pr_link() {
    local repo_url="$1"
    local pr_number="$2"
    local gh_path="${repo_url##github.com/}"
    printf -- "[%s#%s](https://%s/pull/%s)" "$gh_path" "$pr_number" "$repo_url" "$pr_number"
}
# Generate a release log for a range of commits in a single repo.
# Emits one markdown bullet per first-parent commit of $1 between $2 and $3
# (default HEAD), linking the PR when the subject matches a merge/squash
# pattern, and skipping commits that only touch IGNORED_FILES.
release_log() {
setopt local_options BASH_REMATCH
local repo="$1"
local start="$2"
local end="${3:-HEAD}"
local dir="$GOPATH/src/$repo"
local commit pr
git -C "$dir" log \
--format='tformat:%H %s' \
--first-parent \
"$start..$end" |
while read commit subject; do
# Skip gx-only PRs.
git -C "$dir" diff-tree --no-commit-id --name-only "$commit^" "$commit" |
grep -v "${IGNORED_FILES}" >/dev/null || continue
# merge-commit style: take the PR body's first line as the description
if [[ "$subject" =~ '^Merge pull request #([0-9]+) from' ]]; then
local prnum="${BASH_REMATCH[2]}"
local desc="$(git -C "$dir" show --summary --format='tformat:%b' "$commit" | head -1)"
printf -- "- %s (%s)\n" "$desc" "$(pr_link "$repo" "$prnum")"
# squash-merge style: "(#NNN)" suffix on the subject
elif [[ "$subject" =~ '\(#([0-9]+)\)$' ]]; then
local prnum="${BASH_REMATCH[2]}"
printf -- "- %s (%s)\n" "$subject" "$(pr_link "$repo" "$prnum")"
else
printf -- "- %s\n" "$subject"
fi
done
}
# indent prefixes each stdin line with two spaces (for nested markdown lists).
indent() {
sed -e 's/^/  /'
}
# mod_deps streams the current module's dependency list as JSON objects,
# excluding the main module itself (which has no Version).
mod_deps() {
go list -json -m all | jq 'select(.Version != null)'
}
# ensure makes commit $2 resolvable in the local clone of repo $1 under
# $GOPATH/src, cloning and/or fetching as needed. Returns non-zero when the
# commit still cannot be resolved afterwards.
#
# $1: module path (e.g. github.com/foo/bar, possibly with a /vN suffix)
# $2: git ref/commit that must exist locally
ensure() {
    local repo="$1"
    for s in $REPO_SUFFIXES_TO_STRIP; do
        repo=${repo%$s}
    done
    local commit="$2"
    local rpath="$GOPATH/src/$repo"
    if [[ ! -d "$rpath" ]]; then
        msg "Cloning $repo..."
        # clone over https rather than plaintext http so the transport is
        # encrypted and authenticated
        git clone "https://$repo" "$rpath" >&2
    fi
    if ! git -C "$rpath" rev-parse --verify "$commit" >/dev/null; then
        msg "Fetching $repo..."
        git -C "$rpath" fetch --all >&2
    fi
    git -C "$rpath" rev-parse --verify "$commit" >/dev/null || return 1
}
# statsummary aggregates statlog JSON (from stdin) per author, summing
# commits, insertions, deletions, and files, and adds a Lines total.
statsummary() {
jq -s 'group_by(.Author)[] | {Author: .[0].Author, Commits: (. | length), Insertions: (map(.Insertions) | add), Deletions: (map(.Deletions) | add), Files: (map(.Files) | add)}' |
jq '. + {Lines: (.Deletions + .Insertions)}'
}
# recursive_release_log prints a markdown changelog for this repo between two
# refs, plus changelogs for every changed dependency matching REPO_FILTER, and
# a contributor statistics table built from statlog output.
#
# $1: start ref (default: latest non-rc version tag)
# $2: end ref (default: HEAD)
recursive_release_log() {
    local start="${1:-$(git tag -l | sort -V | grep -v -- '-rc' | grep 'v'| tail -n1)}"
    local end="${2:-$(git rev-parse HEAD)}"
    local repo_root="$(git rev-parse --show-toplevel)"
    local package="$(cd "$repo_root" && go list)"
    if ! [[ "${GOPATH}/${package}" != "${repo_root}" ]]; then
        echo "This script requires the target package and all dependencies to live in a GOPATH."
        return 1
    fi
    (
        local result=0
        local workspace="$(mktemp -d)"
        trap "$(printf 'rm -rf "%q"' "$workspace")" INT TERM EXIT
        cd "$workspace"
        # snapshot go.mod at each end of the range and resolve dep versions
        echo "Computing old deps..." >&2
        git -C "$repo_root" show "$start:go.mod" >go.mod
        mod_deps | resolve_commits | jq -s > old_deps.json
        echo "Computing new deps..." >&2
        git -C "$repo_root" show "$end:go.mod" >go.mod
        mod_deps | resolve_commits | jq -s > new_deps.json
        rm -f go.mod go.sum
        printf -- "Generating Changelog for %s %s..%s\n" "$package" "$start" "$end" >&2
        printf -- "- %s:\n" "$package"
        release_log "$package" "$start" "$end" | indent
        statlog "$package" "$start" "$end" > statlog.json
        dep_changes old_deps.json new_deps.json |
        jq --arg filter "$REPO_FILTER" 'select(.Path | match($filter))' |
        # Compute changelogs
        jq -r '"\(.Path) \(.New.Version) \(.New.Ref) \(.Old.Version) \(.Old.Ref // "")"' |
        while read repo new new_ref old old_ref; do
            for s in $REPO_SUFFIXES_TO_STRIP; do
                repo=${repo%$s}
            done
            # note: in zsh the last pipeline stage runs in the current shell,
            # so result=1 below is visible after the loop
            if ! ensure "$repo" "$new_ref"; then
                result=1
                local changelog="failed to fetch repo"
            else
                statlog "$repo" "$old_ref" "$new_ref" >> statlog.json
                local changelog="$(release_log "$repo" "$old_ref" "$new_ref")"
            fi
            if [[ -n "$changelog" ]]; then
                printf -- "- %s (%s -> %s):\n" "$repo" "$old" "$new"
                echo "$changelog" | indent
            fi
        done
        echo
        echo "Contributors"
        echo
        echo "| Contributor | Commits | Lines ± | Files Changed |"
        echo "|-------------|---------|---------|---------------|"
        statsummary <statlog.json |
        jq -s 'sort_by(.Lines) | reverse | .[]' |
        jq -r '"| \(.Author) | \(.Commits) | +\(.Insertions)/-\(.Deletions) | \(.Files) |"'
        # BUG FIX: the original returned "$status" — in zsh that is an alias
        # for $?, i.e. the exit code of the final jq — which silently dropped
        # dependency-fetch failures. Return the accumulated flag instead.
        return "$result"
    )
}
# Entrypoint: generate the changelog for the requested (or default) ref range.
recursive_release_log "$@"
{
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-0170db1f394b35d995252228ee359194b13199d259380541dc529fb0099096b0.params": {
"cid": "QmVxjFRyhmyQaZEtCh7nk2abc7LhFkzhnRX4rcHqCCpikR",
"digest": "7610b9f82bfc88405b7a832b651ce2f6",
"sector_size": 2048
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-0170db1f394b35d995252228ee359194b13199d259380541dc529fb0099096b0.vk": {
"cid": "QmcS5JZs8X3TdtkEBpHAdUYjdNDqcL7fWQFtQz69mpnu2X",
"digest": "0e0958009936b9d5e515ec97b8cb792d",
"sector_size": 2048
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-0cfb4f178bbb71cf2ecfcd42accce558b27199ab4fb59cb78f2483fe21ef36d9.params": {
"cid": "QmUiRx71uxfmUE8V3H9sWAsAXoM88KR4eo1ByvvcFNeTLR",
"digest": "1a7d4a9c8a502a497ed92a54366af33f",
"sector_size": 536870912
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-0cfb4f178bbb71cf2ecfcd42accce558b27199ab4fb59cb78f2483fe21ef36d9.vk": {
"cid": "QmfCeddjFpWtavzfEzZpJfzSajGNwfL4RjFXWAvA9TSnTV",
"digest": "4dae975de4f011f101f5a2f86d1daaba",
"sector_size": 536870912
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-3ea05428c9d11689f23529cde32fd30aabd50f7d2c93657c1d3650bca3e8ea9e.params": {
"cid": "QmcSTqDcFVLGGVYz1njhUZ7B6fkKtBumsLUwx4nkh22TzS",
"digest": "82c88066be968bb550a05e30ff6c2413",
"sector_size": 2048
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-3ea05428c9d11689f23529cde32fd30aabd50f7d2c93657c1d3650bca3e8ea9e.vk": {
"cid": "QmSTCXF2ipGA3f6muVo6kHc2URSx6PzZxGUqu7uykaH5KU",
"digest": "ffd79788d614d27919ae5bd2d94eacb6",
"sector_size": 2048
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-50c7368dea9593ed0989e70974d28024efa9d156d585b7eea1be22b2e753f331.params": {
"cid": "QmU9SBzJNrcjRFDiFc4GcApqdApN6z9X7MpUr66mJ2kAJP",
"digest": "700171ecf7334e3199437c930676af82",
"sector_size": 8388608
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-50c7368dea9593ed0989e70974d28024efa9d156d585b7eea1be22b2e753f331.vk": {
"cid": "QmbmUMa3TbbW3X5kFhExs6WgC4KeWT18YivaVmXDkB6ANG",
"digest": "79ebb55f56fda427743e35053edad8fc",
"sector_size": 8388608
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-5294475db5237a2e83c3e52fd6c2b03859a1831d45ed08c4f35dbf9a803165a9.params": {
"cid": "QmdNEL2RtqL52GQNuj8uz6mVj5Z34NVnbaJ1yMyh1oXtBx",
"digest": "c49499bb76a0762884896f9683403f55",
"sector_size": 8388608
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-5294475db5237a2e83c3e52fd6c2b03859a1831d45ed08c4f35dbf9a803165a9.vk": {
"cid": "QmUiVYCQUgr6Y13pZFr8acWpSM4xvTXUdcvGmxyuHbKhsc",
"digest": "34d4feeacd9abf788d69ef1bb4d8fd00",
"sector_size": 8388608
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-7d739b8cf60f1b0709eeebee7730e297683552e4b69cab6984ec0285663c5781.params": {
"cid": "QmVgCsJFRXKLuuUhT3aMYwKVGNA9rDeR6DCrs7cAe8riBT",
"digest": "827359440349fe8f5a016e7598993b79",
"sector_size": 536870912
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-0-0-7d739b8cf60f1b0709eeebee7730e297683552e4b69cab6984ec0285663c5781.vk": {
"cid": "QmfA31fbCWojSmhSGvvfxmxaYCpMoXP95zEQ9sLvBGHNaN",
"digest": "bd2cd62f65c1ab84f19ca27e97b7c731",
"sector_size": 536870912
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-0-0377ded656c6f524f1618760bffe4e0a1c51d5a70c4509eedae8a27555733edc.params": {
"cid": "QmaUmfcJt6pozn8ndq1JVBzLRjRJdHMTPd4foa8iw5sjBZ",
"digest": "2cf49eb26f1fee94c85781a390ddb4c8",
"sector_size": 34359738368
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-0-0377ded656c6f524f1618760bffe4e0a1c51d5a70c4509eedae8a27555733edc.vk": {
"cid": "QmR9i9KL3vhhAqTBGj1bPPC7LvkptxrH9RvxJxLN1vvsBE",
"digest": "0f8ec542485568fa3468c066e9fed82b",
"sector_size": 34359738368
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-0-559e581f022bb4e4ec6e719e563bf0e026ad6de42e56c18714a2c692b1b88d7e.params": {
"cid": "Qmdtczp7p4wrbDofmHdGhiixn9irAcN77mV9AEHZBaTt1i",
"digest": "d84f79a16fe40e9e25a36e2107bb1ba0",
"sector_size": 34359738368
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-0-559e581f022bb4e4ec6e719e563bf0e026ad6de42e56c18714a2c692b1b88d7e.vk": {
"cid": "QmZCvxKcKP97vDAk8Nxs9R1fWtqpjQrAhhfXPoCi1nkDoF",
"digest": "fc02943678dd119e69e7fab8420e8819",
"sector_size": 34359738368
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-2-2627e4006b67f99cef990c0a47d5426cb7ab0a0ad58fc1061547bf2d28b09def.params": {
"cid": "QmeAN4vuANhXsF8xP2Lx5j2L6yMSdogLzpcvqCJThRGK1V",
"digest": "3810b7780ac0e299b22ae70f1f94c9bc",
"sector_size": 68719476736
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-2-2627e4006b67f99cef990c0a47d5426cb7ab0a0ad58fc1061547bf2d28b09def.vk": {
"cid": "QmWV8rqZLxs1oQN9jxNWmnT1YdgLwCcscv94VARrhHf1T7",
"digest": "59d2bf1857adc59a4f08fcf2afaa916b",
"sector_size": 68719476736
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-2-b62098629d07946e9028127e70295ed996fe3ed25b0f9f88eb610a0ab4385a3c.params": {
"cid": "QmVkrXc1SLcpgcudK5J25HH93QvR9tNsVhVTYHm5UymXAz",
"digest": "2170a91ad5bae22ea61f2ea766630322",
"sector_size": 68719476736
},
"v28-proof-of-spacetime-fallback-merkletree-poseidon_hasher-8-8-2-b62098629d07946e9028127e70295ed996fe3ed25b0f9f88eb610a0ab4385a3c.vk": {
"cid": "QmbfQjPD7EpzjhWGmvWAsyN2mAZ4PcYhsf3ujuhU9CSuBm",
"digest": "6d3789148fb6466d07ee1e24d6292fd6",
"sector_size": 68719476736
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-0-0-sha256_hasher-032d3138d22506ec0082ed72b2dcba18df18477904e35bafee82b3793b06832f.params": {
"cid": "QmWceMgnWYLopMuM4AoGMvGEau7tNe5UK83XFjH5V9B17h",
"digest": "434fb1338ecfaf0f59256f30dde4968f",
"sector_size": 2048
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-0-0-sha256_hasher-032d3138d22506ec0082ed72b2dcba18df18477904e35bafee82b3793b06832f.vk": {
"cid": "QmamahpFCstMUqHi2qGtVoDnRrsXhid86qsfvoyCTKJqHr",
"digest": "dc1ade9929ade1708238f155343044ac",
"sector_size": 2048
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-0-0-sha256_hasher-6babf46ce344ae495d558e7770a585b2382d54f225af8ed0397b8be7c3fcd472.params": {
"cid": "QmYBpTt7LWNAWr1JXThV5VxX7wsQFLd1PHrGYVbrU1EZjC",
"digest": "6c77597eb91ab936c1cef4cf19eba1b3",
"sector_size": 536870912
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-0-0-sha256_hasher-6babf46ce344ae495d558e7770a585b2382d54f225af8ed0397b8be7c3fcd472.vk": {
"cid": "QmWionkqH2B6TXivzBSQeSyBxojaiAFbzhjtwYRrfwd8nH",
"digest": "065179da19fbe515507267677f02823e",
"sector_size": 536870912
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-0-0-sha256_hasher-ecd683648512ab1765faa2a5f14bab48f676e633467f0aa8aad4b55dcb0652bb.params": {
"cid": "QmPXAPPuQtuQz7Zz3MHMAMEtsYwqM1o9H1csPLeiMUQwZH",
"digest": "09e612e4eeb7a0eb95679a88404f960c",
"sector_size": 8388608
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-0-0-sha256_hasher-ecd683648512ab1765faa2a5f14bab48f676e633467f0aa8aad4b55dcb0652bb.vk": {
"cid": "QmYCuipFyvVW1GojdMrjK1JnMobXtT4zRCZs1CGxjizs99",
"digest": "b687beb9adbd9dabe265a7e3620813e4",
"sector_size": 8388608
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-8-0-sha256_hasher-82a357d2f2ca81dc61bb45f4a762807aedee1b0a53fd6c4e77b46a01bfef7820.params": {
"cid": "QmengpM684XLQfG8754ToonszgEg2bQeAGUan5uXTHUQzJ",
"digest": "6a388072a518cf46ebd661f5cc46900a",
"sector_size": 34359738368
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-8-0-sha256_hasher-82a357d2f2ca81dc61bb45f4a762807aedee1b0a53fd6c4e77b46a01bfef7820.vk": {
"cid": "Qmf93EMrADXAK6CyiSfE8xx45fkMfR3uzKEPCvZC1n2kzb",
"digest": "0c7b4aac1c40fdb7eb82bc355b41addf",
"sector_size": 34359738368
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-8-2-sha256_hasher-96f1b4a04c5c51e4759bbf224bbc2ef5a42c7100f16ec0637123f16a845ddfb2.params": {
"cid": "QmS7ye6Ri2MfFzCkcUJ7FQ6zxDKuJ6J6B8k5PN7wzSR9sX",
"digest": "1801f8a6e1b00bceb00cc27314bb5ce3",
"sector_size": 68719476736
},
"v28-stacked-proof-of-replication-merkletree-poseidon_hasher-8-8-2-sha256_hasher-96f1b4a04c5c51e4759bbf224bbc2ef5a42c7100f16ec0637123f16a845ddfb2.vk": {
"cid": "QmehSmC6BhrgRZakPDta2ewoH9nosNzdjCqQRXsNFNUkLN",
"digest": "a89884252c04c298d0b3c81bfd884164",
"sector_size": 68719476736
}
}
\ No newline at end of file
This diff is collapsed.
package ffi
import (
"bytes"
"crypto/rand"
"io"
"io/ioutil"
"math/big"
"testing"
"github.com/filecoin-project/filecoin-ffi/generated"
"github.com/stretchr/testify/assert"
commcid "github.com/filecoin-project/go-fil-commcid"
"fil_integrate/build/state-types/abi"
"github.com/stretchr/testify/require"
)
// Each test below drives one shared Workflow* scenario (defined in the
// non-test part of this package), passing a tee helper that routes
// require/assert calls into this *testing.T.
func TestRegisteredSealProofFunctions(t *testing.T) {
WorkflowRegisteredSealProofFunctions(newTestingTeeHelper(t))
}
func TestRegisteredPoStProofFunctions(t *testing.T) {
WorkflowRegisteredPoStProofFunctions(newTestingTeeHelper(t))
}
func TestProofsLifecycle(t *testing.T) {
WorkflowProofsLifecycle(newTestingTeeHelper(t))
}
func TestGetGPUDevicesDoesNotProduceAnError(t *testing.T) {
WorkflowGetGPUDevicesDoesNotProduceAnError(newTestingTeeHelper(t))
}
func TestGenerateWinningPoStSectorChallenge(t *testing.T) {
WorkflowGenerateWinningPoStSectorChallenge(newTestingTeeHelper(t))
}
func TestGenerateWinningPoStSectorChallengeEdgeCase(t *testing.T) {
WorkflowGenerateWinningPoStSectorChallengeEdgeCase(newTestingTeeHelper(t))
}
// TestJsonMarshalSymmetry checks that SortedPublicSectorInfo survives a
// MarshalJSON/UnmarshalJSON round trip: 100 iterations of 10 randomly
// generated publicSectorInfo entries each.
func TestJsonMarshalSymmetry(t *testing.T) {
for i := 0; i < 100; i++ {
xs := make([]publicSectorInfo, 10)
for j := 0; j < 10; j++ {
var x publicSectorInfo
// random 32-byte replica commitment (commR)
var commR [32]byte
_, err := io.ReadFull(rand.Reader, commR[:])
require.NoError(t, err)
// commR is defined as 32 long above, error can be safely ignored
x.SealedCID, _ = commcid.ReplicaCommitmentV1ToCID(commR[:])
// random sector number in [0, 500)
n, err := rand.Int(rand.Reader, big.NewInt(500))
require.NoError(t, err)
x.SectorNum = abi.SectorNumber(n.Uint64())
xs[j] = x
}
toSerialize := newSortedPublicSectorInfo(xs...)
serialized, err := toSerialize.MarshalJSON()
require.NoError(t, err)
var fromSerialized SortedPublicSectorInfo
err = fromSerialized.UnmarshalJSON(serialized)
require.NoError(t, err)
require.Equal(t, toSerialize, fromSerialized)
}
}
// TestDoesNotExhaustFileDescriptors guards against a file-descriptor leak in
// GeneratePieceCID by computing CommP over many temporary files in sequence;
// if descriptors leaked, TempFile would eventually fail with EMFILE.
//
// Fix: report failures through t.Fatal instead of panic, the idiomatic way to
// abort a Go test (panics bypass test reporting and cleanup).
func TestDoesNotExhaustFileDescriptors(t *testing.T) {
	m := 500         // loops
	n := uint64(508) // quantity of piece bytes per iteration

	for i := 0; i < m; i++ {
		// create a temporary file over which we'll compute CommP
		file, err := ioutil.TempFile("", "")
		if err != nil {
			t.Fatal(err)
		}

		// create a slice of random bytes (represents our piece)
		b := make([]byte, n)
		if _, err = rand.Read(b); err != nil {
			t.Fatal(err)
		}

		// write buffer to temp file
		if _, err := bytes.NewBuffer(b).WriteTo(file); err != nil {
			t.Fatal(err)
		}

		// seek to beginning of file so GeneratePieceCID reads from the start
		if _, err := file.Seek(0, 0); err != nil {
			t.Fatal(err)
		}

		if _, err = GeneratePieceCID(abi.RegisteredSealProof_StackedDrg2KiBV1, file.Name(), abi.UnpaddedPieceSize(n)); err != nil {
			t.Fatal(err)
		}

		if err = file.Close(); err != nil {
			t.Fatal(err)
		}
	}
}
func newTestingTeeHelper(t *testing.T) *testingTeeHelper {
return &testingTeeHelper{t: t}
}
type testingTeeHelper struct {
t *testing.T
}
// The methods below forward to testify's require/assert packages.
//
// Fix: forward msgAndArgs with the spread operator (`msgAndArgs...`). The
// originals passed the slice as a single variadic element, wrapping it in
// another slice and garbling failure messages.
func (tth *testingTeeHelper) RequireTrue(value bool, msgAndArgs ...interface{}) {
	require.True(tth.t, value, msgAndArgs...)
}

func (tth *testingTeeHelper) RequireNoError(err error, msgAndArgs ...interface{}) {
	require.NoError(tth.t, err, msgAndArgs...)
}

func (tth *testingTeeHelper) RequireEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) {
	require.Equal(tth.t, expected, actual, msgAndArgs...)
}

func (tth *testingTeeHelper) AssertNoError(err error, msgAndArgs ...interface{}) bool {
	return assert.NoError(tth.t, err, msgAndArgs...)
}

func (tth *testingTeeHelper) AssertEqual(expected, actual interface{}, msgAndArgs ...interface{}) bool {
	return assert.Equal(tth.t, expected, actual, msgAndArgs...)
}

func (tth *testingTeeHelper) AssertTrue(value bool, msgAndArgs ...interface{}) bool {
	return assert.True(tth.t, value, msgAndArgs...)
}
// TestProofTypes asserts that the Go abi proof-type enum values are
// numerically identical to the cgo-generated constants, so values can be
// cast across the FFI boundary without translation.
func TestProofTypes(t *testing.T) {
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWinning2KiBV1, abi.RegisteredPoStProof_StackedDrgWinning2KiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWinning8MiBV1, abi.RegisteredPoStProof_StackedDrgWinning8MiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWinning512MiBV1, abi.RegisteredPoStProof_StackedDrgWinning512MiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWinning32GiBV1, abi.RegisteredPoStProof_StackedDrgWinning32GiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWinning64GiBV1, abi.RegisteredPoStProof_StackedDrgWinning64GiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWindow2KiBV1, abi.RegisteredPoStProof_StackedDrgWindow2KiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWindow8MiBV1, abi.RegisteredPoStProof_StackedDrgWindow8MiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWindow512MiBV1, abi.RegisteredPoStProof_StackedDrgWindow512MiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWindow32GiBV1, abi.RegisteredPoStProof_StackedDrgWindow32GiBV1)
assert.EqualValues(t, generated.FilRegisteredPoStProofStackedDrgWindow64GiBV1, abi.RegisteredPoStProof_StackedDrgWindow64GiBV1)
assert.EqualValues(t, generated.FilRegisteredSealProofStackedDrg2KiBV1, abi.RegisteredSealProof_StackedDrg2KiBV1)
assert.EqualValues(t, generated.FilRegisteredSealProofStackedDrg8MiBV1, abi.RegisteredSealProof_StackedDrg8MiBV1)
assert.EqualValues(t, generated.FilRegisteredSealProofStackedDrg512MiBV1, abi.RegisteredSealProof_StackedDrg512MiBV1)
assert.EqualValues(t, generated.FilRegisteredSealProofStackedDrg32GiBV1, abi.RegisteredSealProof_StackedDrg32GiBV1)
assert.EqualValues(t, generated.FilRegisteredSealProofStackedDrg64GiBV1, abi.RegisteredSealProof_StackedDrg64GiBV1)
}
#!/bin/bash
# Run the Go test suite (RUST_LOG=info for FFI logging, -count=1 to bypass the
# test cache), then — only if it passes — the Rust suite in release mode.
RUST_LOG=info go test -count=1 ./... && cd rust && cargo test --release --all && cd ..
This diff is collapsed.
[package]
name = "filcrypto"
description = "FFI Interface to Filecoin Proofs"
version = "0.7.5"
authors = [
"nemo <nemo@protocol.ai>",
"dignifiedquire <me@dignifiedquire.com>",
"laser <l@s3r.com>"
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/filecoin-project/filecoin-ffi"
readme = "README.md"
edition = "2018"
publish = false
[lib]
crate-type = ["rlib", "staticlib"]
[dependencies]
bls-signatures = { version = "0.10.0", default-features = false, features = ["blst"] }
blstrs = { version = "0.3" }
byteorder = "1.2"
drop_struct_macro_derive = "0.5.0"
ff = { version = "0.3.1", package = "fff" }
ffi-toolkit = "0.5.0"
filepath = "0.1.1"
groupy = "0.4.1"
libc = "0.2.58"
log = "0.4.7"
fil_logger = "0.1.0"
rand = "0.7"
rand_chacha = "0.2.1"
rayon = "1.2.1"
anyhow = "1.0.23"
bellperson = { version = "0.14.1", default-features = false }
serde_json = "1.0.46"
rust-gpu-tools = "0.3.0"
storage-proofs-porep = { version = "~8.0.1", default-features = false }
[dependencies.filecoin-proofs-api]
package = "filecoin-proofs-api"
git = "https://github.com/a263200357/rust-filecoin-proofs-api"
version = "8.0.1"
default-features = false
[build-dependencies]
cbindgen = "= 0.14.0"
[dev-dependencies]
tempfile = "3.0.8"
[features]
default = ["pairing", "gpu", "multicore-sdr" ]
pairing = ["filecoin-proofs-api/pairing", "bellperson/pairing", "storage-proofs-porep/pairing"]
blst = ["filecoin-proofs-api/blst", "bellperson/blst", "storage-proofs-porep/blst"]
blst-portable = ["bls-signatures/blst-portable", "blstrs/portable"]
gpu = ["filecoin-proofs-api/gpu", "bellperson/gpu", "storage-proofs-porep/gpu"]
multicore-sdr = ["storage-proofs-porep/multicore-sdr"]
use std::env;
use std::path::Path;

/// Build script: generates the C header (`filcrypto.h`) for this FFI crate
/// by running cbindgen over the crate's own manifest directory.
fn main() {
    // Cargo-provided output directory for build artifacts; the header goes
    // into an `include/` subdirectory beneath it.
    let out_dir = env::var("OUT_DIR").unwrap();
    let hdr_out = Path::new(&out_dir).join("include/filcrypto.h");

    // Use the imported `env` consistently (the original mixed `env::var`
    // and fully-qualified `std::env::var`). cbindgen picks up cbindgen.toml
    // from the manifest directory automatically.
    cbindgen::generate(env::var("CARGO_MANIFEST_DIR").unwrap())
        .expect("Could not generate header")
        .write_to_file(hdr_out);
}
header = """
/* filcrypto Header */
#ifdef __cplusplus
extern "C" {
#endif
"""
trailer = """
#ifdef __cplusplus
} /* extern "C" */
#endif
"""
include_guard = "filcrypto_H"
include_version = true
language = "C"
[parse]
parse_deps = true
include = ["ffi-toolkit"]
[enum]
prefix_with_name = true
Name: filcrypto
Version: @VERSION@
Description: C bindings for Filecoin Proofs
Libs: @PRIVATE_LIBS@
/* Minimal primitive type definitions supplied to the c-for-go binding
 * generator so it can parse the FFI header without pulling in a full
 * system libc. Widths match the x86-64 LP64 ABI — TODO confirm these are
 * only used for binding generation, never compiled into the library. */
typedef unsigned char uint8_t;
typedef unsigned long long uint64_t;
typedef unsigned long int size_t;
#define bool _Bool
[
{
"rustc_target_feature": "+adx",
"check_cpu_for_feature": "adx"
},
{
"rustc_target_feature": "+sha",
"check_cpu_for_feature": "sha_ni"
},
{
"rustc_target_feature": "+sha2",
"check_cpu_for_feature": "sha2"
},
{
"rustc_target_feature": "+sse2",
"check_cpu_for_feature": "sse2"
},
{
"rustc_target_feature": "+avx2",
"check_cpu_for_feature": "avx2"
},
{
"rustc_target_feature": "+avx",
"check_cpu_for_feature": "avx"
},
{
"rustc_target_feature": "+sse4.2",
"check_cpu_for_feature": "sse4_2"
},
{
"rustc_target_feature": "+sse4.1",
"check_cpu_for_feature": "sse4_1"
}
]
#!/usr/bin/env bash
# build-release.sh — builds a Rust FFI library in release mode and emits a
# pkg-config (.pc) file describing the native static libraries required to
# link against it.
#
# Usage: build-release.sh <library-name> <rust-toolchain> [extra cargo args...]
#   e.g. ./build-release.sh filecoin +nightly-2019-04-19 --features blst
set -Exeo pipefail
main() {
# first argument: base name of the library ("filecoin", "snark", ...)
if [[ -z "$1" ]]
then
(>&2 echo '[build-release/main] Error: script requires a library name, e.g. "filecoin" or "snark"')
exit 1
fi
# second argument: rustup toolchain selector passed to cargo as "+<toolchain>"
if [[ -z "$2" ]]
then
(>&2 echo '[build-release/main] Error: script requires a toolchain, e.g. ./build-release.sh +nightly-2019-04-19')
exit 1
fi
# temporary place for storing build output (cannot use 'local', because
# 'trap' is not going to have access to variables scoped to this function)
#
__build_output_log_tmp=$(mktemp)
# clean up temp file on exit
#
trap '{ rm -f $__build_output_log_tmp; }' EXIT
# build with RUSTFLAGS configured to output linker flags for native libs
#
local __rust_flags="--print native-static-libs ${RUSTFLAGS}"
# remaining arguments (${@:3}) are forwarded verbatim to cargo; build output
# is teed to the temp log so the linker flags can be scraped afterwards
RUSTFLAGS="${__rust_flags}" \
cargo +$2 build \
--release ${@:3} 2>&1 | tee ${__build_output_log_tmp}
# parse build output for linker flags
#
local __linker_flags=$(cat ${__build_output_log_tmp} \
| grep native-static-libs\: \
| head -n 1 \
| cut -d ':' -f 3)
# generate pkg-config (substitute current commit hash and linker flags
# into the .pc template; ';' is used as the sed delimiter because the
# linker flags themselves contain '/' characters)
#
sed -e "s;@VERSION@;$(git rev-parse HEAD);" \
-e "s;@PRIVATE_LIBS@;${__linker_flags};" "$1.pc.template" > "$1.pc"
# ensure header file was built ('| read' fails, via pipefail, when find
# produces no output)
#
find -L . -type f -name "$1.h" | read
# ensure the archive file was built
#
find -L . -type f -name "lib$1.a" | read
}
main "$@"; exit
#!/usr/bin/env bash
# package-release.sh — collects the built FFI artifacts (header, static
# library, pkg-config file) into a gzipped release tarball.
#
# Usage: package-release.sh <output-tarball-path>
set -Exeuo pipefail
main() {
# require the destination path for the tarball
if [[ -z "$1" ]]
then
(>&2 echo '[package-release/main] Error: script requires path to which it will write release (gzipped) tarball, e.g. "/tmp/filecoin-ffi-Darwin-standard.tar.tz"')
exit 1
fi
local __tarball_output_path=$1
# create temporary directory to hold build artifacts (must not be declared
# with 'local' because we will use 'trap' to clean it up)
#
__tmp_dir=$(mktemp -d)
(>&2 echo "[package-release/main] preparing release files")
# clean up temp directory on exit
#
trap '{ rm -rf $__tmp_dir; }' EXIT
# copy assets into temporary directory (-L follows symlinks; '--' guards
# against paths starting with '-')
#
find -L . -type f -name filcrypto.h -exec cp -- "{}" $__tmp_dir/ \;
find -L . -type f -name libfilcrypto.a -exec cp -- "{}" $__tmp_dir/ \;
find -L . -type f -name filcrypto.pc -exec cp -- "{}" $__tmp_dir/ \;
# create gzipped tarball from contents of temporary directory
#
# NOTE(review): archiving "$__tmp_dir/*" embeds the tmp directory path
# inside the tarball (tar strips only the leading '/'); consumers appear
# to rely on that layout — confirm before changing to 'tar -C'.
tar -czf $__tarball_output_path $__tmp_dir/*
(>&2 echo "[package-release/main] release file created: $__tarball_output_path")
}
main "$@"; exit
#!/usr/bin/env bash
# publish-release.sh — uploads a release tarball to a GitHub release keyed
# by the current CircleCI commit SHA, creating the release first if it does
# not already exist.
#
# Usage: publish-release.sh <tarball-path> <release-name>
# Requires: GITHUB_TOKEN, CIRCLE_SHA1, CIRCLE_PROJECT_USERNAME,
#           CIRCLE_PROJECT_REPONAME environment variables; curl and jq.
set -Exeuo pipefail
main() {
# BUGFIX: under 'set -u', expanding an unset $1/$2/$GITHUB_TOKEN aborts the
# script with "unbound variable" BEFORE the friendly error below can print.
# Use ${VAR:-} so the emptiness checks actually run.
if [[ -z "${1:-}" ]]
then
(>&2 echo '[publish-release/main] Error: script requires a release (gzipped) tarball path, e.g. "/tmp/filecoin-ffi-Darwin-standard.tar.tz"')
exit 1
fi
if [[ -z "${2:-}" ]]
then
(>&2 echo '[publish-release/main] Error: script requires a release name, e.g. "filecoin-ffi-Darwin-standard" or "filecoin-ffi-Linux-optimized"')
exit 1
fi
local __release_file=$1
local __release_name=$2
# release tag is the first 16 hex chars of the commit SHA
local __release_tag="${CIRCLE_SHA1:0:16}"
# make sure we have a token set, api requests won't work otherwise
if [ -z "${GITHUB_TOKEN:-}" ]; then
(>&2 echo "[publish-release/main] \$GITHUB_TOKEN not set, publish failed")
exit 1
fi
# see if the release already exists by tag
local __release_response=`
curl \
--header "Authorization: token $GITHUB_TOKEN" \
"https://api.github.com/repos/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/releases/tags/$__release_tag"
`
# jq prints the literal string "null" when the .id field is absent
local __release_id=`echo $__release_response | jq '.id'`
if [ "$__release_id" = "null" ]; then
(>&2 echo '[publish-release/main] creating release')
RELEASE_DATA="{
\"tag_name\": \"$__release_tag\",
\"target_commitish\": \"$CIRCLE_SHA1\",
\"name\": \"$__release_tag\",
\"body\": \"\"
}"
# create it if it doesn't exist yet
#
__release_response=`
curl \
--request POST \
--header "Authorization: token $GITHUB_TOKEN" \
--header "Content-Type: application/json" \
--data "$RELEASE_DATA" \
"https://api.github.com/repos/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME/releases"
`
else
(>&2 echo '[publish-release/main] release already exists')
fi
# the upload_url comes back as a URI template; strip the "{...}" suffix
__release_upload_url=`echo $__release_response | jq -r '.upload_url' | cut -d'{' -f1`
curl \
--request POST \
--header "Authorization: token $GITHUB_TOKEN" \
--header "Content-Type: application/octet-stream" \
--data-binary "@$__release_file" \
"$__release_upload_url?name=$(basename $__release_file)"
(>&2 echo '[publish-release/main] release file uploaded')
}
main "$@"; exit
This diff is collapsed.
This diff is collapsed.
#![deny(clippy::all)]
#![allow(clippy::missing_safety_doc)]
#![allow(clippy::upper_case_acronyms)]

//! filcrypto: C-ABI FFI surface for Filecoin cryptography.

// BLS signature FFI.
pub mod bls;
// Storage/PoSt proof FFI.
pub mod proofs;
// Shared FFI utilities.
pub mod util;
This diff is collapsed.
This diff is collapsed.
//! Proofs FFI module: public `api` and `types`, with private `helpers`.

// Internal conversion/helper routines (crate-private).
mod helpers;
// Exported FFI entry points.
pub mod api;
// Exported FFI type definitions.
pub mod types;
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
//+build tools
package ffi
import (
_ "github.com/xlab/c-for-go"
)
This diff is collapsed.
package ffi

// Version is most similar to semver's minor version.
// It is here as we cannot use gomod versioning due to local replace directives
// for native dependencies. Bump it when the FFI surface changes.
const Version int = 3
...@@ -3,22 +3,18 @@ module fil_integrate ...@@ -3,22 +3,18 @@ module fil_integrate
go 1.16 go 1.16
require ( require (
github.com/a263200357/filecoin-ffi v0.0.0-00010101000000-000000000000
github.com/docker/go-units v0.4.0 github.com/docker/go-units v0.4.0
github.com/filecoin-project/filecoin-ffi v0.30.4-0.20200910194244-f640612a1a1f
github.com/filecoin-project/go-fil-commcid v0.1.0
github.com/ipfs/go-cid v0.0.7
github.com/ipfs/go-log/v2 v2.3.0 github.com/ipfs/go-log/v2 v2.3.0
github.com/kr/pretty v0.2.0 // indirect github.com/kr/pretty v0.2.0 // indirect
github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1 github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1
github.com/minio/md5-simd v1.1.2 github.com/minio/md5-simd v1.1.2
github.com/minio/sha256-simd v0.1.1 github.com/minio/sha256-simd v0.1.1
github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/go-homedir v1.1.0
github.com/mr-tron/base58 v1.2.0 // indirect
github.com/multiformats/go-varint v0.0.6 // indirect
github.com/stretchr/testify v1.7.0 github.com/stretchr/testify v1.7.0
github.com/urfave/cli/v2 v2.3.0 github.com/urfave/cli/v2 v2.3.0
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
) )
replace github.com/filecoin-project/filecoin-ffi => ./extern/filecoin-ffi replace github.com/a263200357/filecoin-ffi => ./extern/filecoin-ffi
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
...@@ -12,7 +12,7 @@ import ( ...@@ -12,7 +12,7 @@ import (
"golang.org/x/xerrors" "golang.org/x/xerrors"
ffi "github.com/filecoin-project/filecoin-ffi" ffi "github.com/a263200357/filecoin-ffi"
logging "github.com/ipfs/go-log/v2" logging "github.com/ipfs/go-log/v2"
"fil_integrate/build/cid" "fil_integrate/build/cid"
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment