Release 2.0.0

This commit is contained in:
Santiago Lezica 2021-01-29 18:51:08 -03:00
parent 8107c4478b
commit cef49eff22
209 changed files with 70157 additions and 926 deletions

View File

@ -1,280 +0,0 @@
package main
import (
"fmt"
"os"
"path/filepath"
"time"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/btcjson"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btclog"
"github.com/btcsuite/btcd/rpcclient"
"github.com/btcsuite/btcutil"
_ "github.com/btcsuite/btcwallet/chain"
"github.com/btcsuite/btcwallet/walletdb"
_ "github.com/btcsuite/btcwallet/walletdb/bdb"
"github.com/btcsuite/btcd/chaincfg"
"github.com/lightninglabs/neutrino"
"github.com/lightninglabs/neutrino/headerfs"
)
// RelevantTx contains a PkScript, an Address and a boolean to check if it is spent or not,
// describing a transaction output that pays to one of our watched addresses.
type RelevantTx struct {
	PkScript       []byte         // raw output script (scriptPubKey)
	Address        string         // encoded address the output pays to
	Spent          bool           // set to true once a spending input is seen
	Satoshis       int64          // output value in satoshis
	SigningDetails signingDetails // key/path data needed to sign a sweep of this output
	Outpoint       wire.OutPoint  // (txid, index) identifying the output on-chain
}
// String renders a human-readable one-line summary of this relevant output:
// its outpoint, amount in satoshis, and derivation path.
func (tx *RelevantTx) String() string {
	outpoint := tx.Outpoint
	path := tx.SigningDetails.Address.DerivationPath()
	return fmt.Sprintf("outpoint %v:%v for %v sats on path %v",
		outpoint.Hash, outpoint.Index, tx.Satoshis, path)
}
// Chain configuration: this tool operates exclusively on Bitcoin mainnet.
var (
	chainParams        = chaincfg.MainNetParams
	bitcoinGenesisDate = chainParams.GenesisBlock.Header.Timestamp
)

// relevantTxs indexes every output found for a watched address, by outpoint.
var relevantTxs = make(map[wire.OutPoint]*RelevantTx)

// rescan is the in-progress neutrino rescan.
var rescan *neutrino.Rescan

// TODO: Add signing details to the watchAddresses map
// watchAddresses maps an encoded address to the details needed to sign for it.
var watchAddresses = make(map[string]signingDetails)
// startRescan scans the chain from the wallet birthday looking for outputs
// paying to the given addresses, printing progress to stdout, and returns
// the ones still unspent. It blocks until the rescan reaches the best block.
func startRescan(chainService *neutrino.ChainService, addrs map[string]signingDetails, birthday int) []*RelevantTx {
	watchAddresses = addrs

	// Wait till we know where the tip is. Sleep between polls: the original
	// empty loop busy-waited and pegged a CPU core during header sync.
	for !chainService.IsCurrent() {
		time.Sleep(100 * time.Millisecond)
	}

	bestBlock, _ := chainService.BestBlock()
	startHeight := findStartHeight(birthday, chainService)

	fmt.Println()
	fmt.Printf("Starting at height %v", startHeight.Height)
	fmt.Println()

	ntfn := rpcclient.NotificationHandlers{
		// Draw a 20-segment progress bar as each block between start and tip connects.
		OnBlockConnected: func(hash *chainhash.Hash, height int32, t time.Time) {
			totalDif := bestBlock.Height - startHeight.Height
			currentDif := height - startHeight.Height
			progress := (float64(currentDif) / float64(totalDif)) * 100.0
			progressBar := ""
			numberOfBars := int(progress / 5)
			for index := 0; index <= 20; index++ {
				if index <= numberOfBars {
					progressBar += "■"
				} else {
					progressBar += "□"
				}
			}
			fmt.Printf("\rProgress: [%v] %.2f%%. Scanning block %v of %v.", progressBar, progress, currentDif, totalDif)
		},
		// Mark any of our known outputs as spent when an input consumes them.
		OnRedeemingTx: func(tx *btcutil.Tx, details *btcjson.BlockDetails) {
			for _, input := range tx.MsgTx().TxIn {
				outpoint := input.PreviousOutPoint
				if _, ok := relevantTxs[outpoint]; ok {
					relevantTxs[outpoint].Spent = true
				}
			}
		},
		// Record outputs paying to watched addresses.
		OnRecvTx: func(tx *btcutil.Tx, details *btcjson.BlockDetails) {
			checkOutpoints(tx, details.Height)
		},
	}

	rescan = neutrino.NewRescan(
		&neutrino.RescanChainSource{
			ChainService: chainService,
		},
		neutrino.WatchAddrs(buildAddresses()...),
		neutrino.NotificationHandlers(ntfn),
		neutrino.StartBlock(startHeight),
		neutrino.EndBlock(bestBlock),
	)

	errorChan := rescan.Start()
	rescan.WaitForShutdown()
	if err := <-errorChan; err != nil {
		panic(err)
	}

	return buildUtxos()
}
// startChainService creates and starts a neutrino light client backed by a
// throwaway database under the OS temp directory. It returns the running
// service, a cleanup function that stops it and deletes the temporary files,
// and any startup error.
func startChainService() (*neutrino.ChainService, func(), error) {
	setUpLogger()

	// Start from a clean slate on every run.
	dir := os.TempDir()
	dirFolder := filepath.Join(dir, "muunRecoveryTool")
	os.RemoveAll(dirFolder)
	os.MkdirAll(dirFolder, 0700)
	dbPath := filepath.Join(dirFolder, "neutrino.db")

	db, err := walletdb.Open("bdb", dbPath, true)
	if err == walletdb.ErrDbDoesNotExist {
		db, err = walletdb.Create("bdb", dbPath, true)
	}
	if err != nil {
		// Neither open nor create succeeded; nothing below can work with a
		// nil database handle. (The original only handled the create error.)
		panic(err)
	}

	peers := make([]string, 1)
	peers[0] = "btcd-mainnet.lightning.computer"

	chainService, err := neutrino.NewChainService(neutrino.Config{
		DataDir:      dirFolder,
		Database:     db,
		ChainParams:  chainParams,
		ConnectPeers: peers,
		AddPeers:     peers,
	})
	if err != nil {
		panic(err)
	}

	err = chainService.Start()
	if err != nil {
		panic(err)
	}

	// cleanup tears down everything acquired above. Named `cleanup` rather
	// than `close` to avoid shadowing the builtin.
	cleanup := func() {
		db.Close()
		err := chainService.Stop()
		if err != nil {
			panic(err)
		}
		os.Remove(dbPath)
		os.RemoveAll(dirFolder)
	}

	return chainService, cleanup, err
}
// findStartHeight binary-searches the headers known to the chain service for
// a block whose timestamp is within two hours of the wallet birthday, and
// returns it as the rescan starting point. A zero birthday returns an empty
// BlockStamp (scan from genesis). Lookup errors are ignored here.
func findStartHeight(birthday int, chain *neutrino.ChainService) *headerfs.BlockStamp {
	if birthday == 0 {
		return &headerfs.BlockStamp{}
	}
	const (
		// birthdayBlockDelta is the maximum time delta allowed between our
		// birthday timestamp and our birthday block's timestamp when searching
		// for a better birthday block candidate (if possible).
		birthdayBlockDelta = 2 * time.Hour
	)
	// birthday is a day count relative to the Bitcoin genesis block; the -2
	// presumably pads the estimate back by two days for safety — TODO confirm.
	birthtime := bitcoinGenesisDate.Add(time.Duration(birthday-2) * 24 * time.Hour)
	block, _ := chain.BestBlock()
	startHeight := int32(0)
	bestHeight := block.Height
	// Classic binary search over block heights, comparing header timestamps.
	left, right := startHeight, bestHeight
	for {
		mid := left + (right-left)/2
		hash, _ := chain.GetBlockHash(int64(mid))
		header, _ := chain.GetBlockHeader(hash)
		// If the search happened to reach either of our range extremes,
		// then we'll just use that as there's nothing left to search.
		if mid == startHeight || mid == bestHeight || mid == left {
			return &headerfs.BlockStamp{
				Hash:      *hash,
				Height:    mid,
				Timestamp: header.Timestamp,
			}
		}
		// The block's timestamp is more than 2 hours after the
		// birthday, so look for a lower block.
		if header.Timestamp.Sub(birthtime) > birthdayBlockDelta {
			right = mid
			continue
		}
		// The birthday is more than 2 hours before the block's
		// timestamp, so look for a higher block.
		if header.Timestamp.Sub(birthtime) < -birthdayBlockDelta {
			left = mid
			continue
		}
		// Within the delta: this block is a good enough starting point.
		return &headerfs.BlockStamp{
			Hash:      *hash,
			Height:    mid,
			Timestamp: header.Timestamp,
		}
	}
}
// checkOutpoints records every output of tx that pays to a watched address,
// storing it in relevantTxs keyed by outpoint. Outpoints already recorded
// are left untouched. (height is currently unused; kept for the caller.)
func checkOutpoints(tx *btcutil.Tx, height int32) {
	// Loop in the output addresses
	for index, output := range tx.MsgTx().TxOut {
		_, addrs, _, _ := txscript.ExtractPkScriptAddrs(output.PkScript, &chainParams)
		for _, addr := range addrs {
			// If one of the output addresses is in our Watch Addresses map, we try to add it to our relevant tx model
			if _, ok := watchAddresses[addr.EncodeAddress()]; ok {
				hash := tx.Hash()
				relevantTx := &RelevantTx{
					PkScript:       output.PkScript,
					Address:        addr.String(),
					Spent:          false,
					Satoshis:       output.Value,
					SigningDetails: watchAddresses[addr.EncodeAddress()],
					Outpoint: wire.OutPoint{
						Hash:  *hash,
						Index: uint32(index),
					},
				}
				if _, ok := relevantTxs[relevantTx.Outpoint]; ok {
					// Already recorded: skip this output but keep scanning the
					// rest of the transaction. The previous `return` here
					// aborted the whole loop, silently dropping any later
					// outputs of the same tx that also paid us.
					continue
				}
				relevantTxs[relevantTx.Outpoint] = relevantTx
			}
		}
	}
}
// buildUtxos collects the recorded outputs that no transaction has spent yet.
func buildUtxos() []*RelevantTx {
	var unspent []*RelevantTx
	for _, candidate := range relevantTxs {
		if candidate.Spent {
			continue
		}
		unspent = append(unspent, candidate)
	}
	return unspent
}
// buildAddresses decodes every watched address string into a btcutil.Address,
// panicking if any entry is malformed.
func buildAddresses() []btcutil.Address {
	decoded := make([]btcutil.Address, 0, len(watchAddresses))
	for encoded := range watchAddresses {
		addr, err := btcutil.DecodeAddress(encoded, &chainParams)
		if err != nil {
			panic(err)
		}
		decoded = append(decoded, addr)
	}
	return decoded
}
// setUpLogger installs a btclog backend for neutrino but sets it to LevelOff,
// silencing neutrino's internal logging so it never clutters the tool's UI.
func setUpLogger() {
	logger := btclog.NewBackend(os.Stdout).Logger("MUUN")
	logger.SetLevel(btclog.LevelOff)
	neutrino.UseLogger(logger)
}

51
cmd/survey/main.go Normal file
View File

@ -0,0 +1,51 @@
package main
import (
"fmt"
"github.com/muun/recovery/electrum"
"github.com/muun/recovery/scanner"
)
// Survey results, bucketed by outcome.
var failedToConnect []string // servers we could not connect to or identify
var withBatching []string    // servers whose implementation supports batching
var withoutBatching []string // reachable servers without batch support
// main surveys every known public Electrum server and prints each one under
// the category it fell into: batch support, no batch support, or unreachable.
func main() {
	client := electrum.NewClient()

	for _, server := range scanner.PublicElectrumServers {
		surveyServer(client, server)
	}

	// Print the three sections in a fixed order, as Go source snippets.
	sections := []struct {
		title   string
		servers []string
	}{
		{"// With batch support:", withBatching},
		{"// Without batch support:", withoutBatching},
		{"// Unclassified:", failedToConnect},
	}

	for _, section := range sections {
		fmt.Println(section.title)
		for _, server := range section.servers {
			fmt.Printf("\"%s\"\n", server)
		}
	}
}
// surveyServer connects to a single Electrum server and appends it to the
// result bucket matching its connectivity and batch support.
func surveyServer(client *electrum.Client, server string) {
	// Typo fix: was "Surveyng".
	fmt.Println("Surveying", server)

	err := client.Connect(server)
	if err != nil {
		failedToConnect = append(failedToConnect, server)
		return
	}

	if client.SupportsBatching() {
		withBatching = append(withBatching, server)
	} else {
		withoutBatching = append(withoutBatching, server)
	}
}

392
electrum/client.go Normal file
View File

@ -0,0 +1,392 @@
package electrum
import (
"bufio"
"crypto/sha256"
"crypto/tls"
"encoding/hex"
"encoding/json"
"fmt"
"net"
"sort"
"strings"
"time"
"github.com/muun/recovery/utils"
)
// defaultLoggerTag is the log tag used before any server is selected.
const defaultLoggerTag = "Electrum/?"

// connectionTimeout bounds the TLS dial to a server.
const connectionTimeout = time.Second * 10

// messageDelim terminates every message exchanged with the server.
const messageDelim = byte('\n')

// implsWithBatching lists server implementation name prefixes known to
// support JSON-RPC request batching.
var implsWithBatching = []string{"ElectrumX"}
// Client is a TLS client that implements a subset of the Electrum protocol.
//
// It includes a minimal implementation of a JSON-RPC client, since the one provided by the
// standard library doesn't support features such as batching.
//
// It is absolutely not thread-safe. Every Client should have a single owner.
type Client struct {
	Server       string // host:port of the connected server
	ServerImpl   string // implementation name reported by server.version
	ProtoVersion string // protocol version reported by server.version

	nextRequestID int           // last JSON-RPC request ID handed out
	conn          net.Conn      // open TLS connection; nil when disconnected
	log           *utils.Logger // logger, retagged per connected server
}
// Request models the structure of all Electrum protocol requests.
type Request struct {
	ID     int     `json:"id"`
	Method string  `json:"method"`
	Params []Param `json:"params"`
}

// ErrorResponse models the structure of a generic error response.
type ErrorResponse struct {
	ID    int         `json:"id"`
	Error interface{} `json:"error"` // type varies among Electrum implementations.
}

// ServerVersionResponse models the structure of a `server.version` response.
type ServerVersionResponse struct {
	ID     int      `json:"id"`
	Result []string `json:"result"` // [implementation name, protocol version]
}

// ListUnspentResponse models a `blockchain.scripthash.listunspent` response.
type ListUnspentResponse struct {
	ID     int          `json:"id"`
	Result []UnspentRef `json:"result"`
}

// BroadcastResponse models the structure of a `blockchain.transaction.broadcast` response.
type BroadcastResponse struct {
	ID     int    `json:"id"`
	Result string `json:"result"` // hash of the broadcast transaction
}

// UnspentRef models an item in the `ListUnspentResponse` results.
type UnspentRef struct {
	TxHash string `json:"tx_hash"`
	TxPos  int    `json:"tx_pos"`
	Value  int    `json:"value"`
	Height int    `json:"height"`
}

// Param is a convenience type that models an item in the `Params` array of an Request.
type Param = interface{}
// NewClient creates an initialized Client instance.
func NewClient() *Client {
	client := &Client{
		log: utils.NewLogger(defaultLoggerTag),
	}
	return client
}
// Connect establishes a TLS connection to an Electrum server.
// Any previous connection is dropped first. On success the client is left
// connected and identified (ServerImpl/ProtoVersion populated); on any
// failure it is left disconnected.
func (c *Client) Connect(server string) error {
	c.Disconnect()
	c.log.SetTag("Electrum/" + server)
	c.Server = server

	c.log.Printf("Connecting")

	err := c.establishConnection()
	if err != nil {
		c.Disconnect()
		return c.log.Errorf("Connect failed: %w", err)
	}

	// Before calling it a day send a test request (trust me), and as we do identify the server:
	err = c.identifyServer()
	if err != nil {
		c.Disconnect()
		return c.log.Errorf("Identifying server failed: %w", err)
	}

	c.log.Printf("Identified as %s (%s)", c.ServerImpl, c.ProtoVersion)
	return nil
}
// Disconnect cuts the connection (if connected) to the Electrum server.
func (c *Client) Disconnect() error {
	if c.conn == nil {
		// Already disconnected: nothing to do.
		return nil
	}

	c.log.Printf("Disconnecting")

	if err := c.conn.Close(); err != nil {
		return c.log.Errorf("Disconnect failed: %w", err)
	}

	c.conn = nil
	return nil
}
// SupportsBatching returns whether this client can process batch requests,
// based on the implementation name the server reported.
func (c *Client) SupportsBatching() bool {
	impl := c.ServerImpl
	for _, known := range implsWithBatching {
		if strings.HasPrefix(impl, known) {
			return true
		}
	}
	return false
}
// ServerVersion calls the `server.version` method and returns the [impl, protocol version] tuple.
func (c *Client) ServerVersion() ([]string, error) {
	var response ServerVersionResponse
	request := Request{Method: "server.version", Params: []Param{}}

	if err := c.call(&request, &response); err != nil {
		return nil, c.log.Errorf("ServerVersion failed: %w", err)
	}
	return response.Result, nil
}
// Broadcast calls the `blockchain.transaction.broadcast` endpoint and returns the transaction hash.
func (c *Client) Broadcast(rawTx string) (string, error) {
	var response BroadcastResponse
	request := Request{Method: "blockchain.transaction.broadcast", Params: []Param{rawTx}}

	if err := c.call(&request, &response); err != nil {
		return "", c.log.Errorf("Broadcast failed: %w", err)
	}
	return response.Result, nil
}
// ListUnspent calls `blockchain.scripthash.listunspent` and returns the UTXO results.
func (c *Client) ListUnspent(indexHash string) ([]UnspentRef, error) {
	var response ListUnspentResponse
	request := Request{Method: "blockchain.scripthash.listunspent", Params: []Param{indexHash}}

	if err := c.call(&request, &response); err != nil {
		return nil, c.log.Errorf("ListUnspent failed: %w", err)
	}
	return response.Result, nil
}
// ListUnspentBatch is like `ListUnspent`, but using batching: one request per
// index hash, sent as a single JSON-RPC batch.
func (c *Client) ListUnspentBatch(indexHashes []string) ([][]UnspentRef, error) {
	requests := make([]*Request, 0, len(indexHashes))
	for _, indexHash := range indexHashes {
		requests = append(requests, &Request{
			Method: "blockchain.scripthash.listunspent",
			Params: []Param{indexHash},
		})
	}

	var responses []ListUnspentResponse
	if err := c.callBatch(requests, &responses); err != nil {
		return nil, fmt.Errorf("ListUnspentBatch failed: %w", err)
	}

	// Batch responses may arrive out of order: restore request order by ID.
	sort.Slice(responses, func(i, j int) bool {
		return responses[i].ID < responses[j].ID
	})

	// Collect the per-request result lists, in order.
	var unspentRefs [][]UnspentRef
	for _, response := range responses {
		unspentRefs = append(unspentRefs, response.Result)
	}
	return unspentRefs, nil
}
// establishConnection dials the server over TLS with a bounded timeout,
// storing the connection in the client on success.
func (c *Client) establishConnection() error {
	// TODO: check if insecure is necessary
	// NOTE(review): InsecureSkipVerify disables certificate validation, which
	// exposes the connection to MITM. Many Electrum servers use self-signed
	// certs, which presumably motivated this — confirm before tightening.
	config := &tls.Config{
		InsecureSkipVerify: true,
	}
	dialer := &net.Dialer{
		Timeout: connectionTimeout,
	}
	conn, err := tls.DialWithDialer(dialer, "tcp", c.Server, config)
	if err != nil {
		return err
	}
	c.conn = conn
	return nil
}
// identifyServer queries `server.version` and caches the implementation name
// and protocol version in the client.
func (c *Client) identifyServer() error {
	serverVersion, err := c.ServerVersion()
	if err != nil {
		return err
	}

	// Guard against malformed replies: we need [impl, protocolVersion].
	// Indexing blindly (as before) panics on misbehaving servers.
	if len(serverVersion) < 2 {
		return c.log.Errorf("unexpected server.version reply: %v", serverVersion)
	}

	c.ServerImpl = serverVersion[0]
	c.ProtoVersion = serverVersion[1]

	c.log.Printf("Identified %s %s", c.ServerImpl, c.ProtoVersion)
	return nil
}
// IsConnected returns whether this client is connected to a server.
// It does not guarantee the next request will succeed: only local state is
// checked, so a peer that silently died is not detected here.
func (c *Client) IsConnected() bool {
	return c.conn != nil
}
// call executes a request with JSON marshalling, and loads the response into a pointer.
// The flow is: assign a fresh ID, marshal, exchange bytes on the wire, then
// unmarshal the reply twice — first into ErrorResponse to surface a
// protocol-level error, then into the caller's typed response.
func (c *Client) call(request *Request, response interface{}) error {
	// Assign a fresh request ID:
	request.ID = c.incRequestID()

	// Serialize the request:
	requestBytes, err := json.Marshal(request)
	if err != nil {
		return c.log.Errorf("Marshal failed %v: %w", request, err)
	}

	// Make the call, obtain the serialized response:
	responseBytes, err := c.callRaw(requestBytes)
	if err != nil {
		return c.log.Errorf("Send failed %s: %w", string(requestBytes), err)
	}

	// Deserialize into an error, to see if there's any:
	var maybeErrorResponse ErrorResponse
	err = json.Unmarshal(responseBytes, &maybeErrorResponse)
	if err != nil {
		return c.log.Errorf("Unmarshal of potential error failed: %s %w", string(responseBytes), err)
	}
	if maybeErrorResponse.Error != nil {
		return c.log.Errorf("Electrum error: %v", maybeErrorResponse.Error)
	}

	// Deserialize the response:
	err = json.Unmarshal(responseBytes, response)
	if err != nil {
		return c.log.Errorf("Unmarshal failed %s: %w", string(responseBytes), err)
	}
	return nil
}
// callBatch executes a batch request with JSON marshalling, and loads the response into a pointer.
// Response may not match request order, so callers MUST sort them by ID.
func (c *Client) callBatch(requests []*Request, response interface{}) error {
	// Assign fresh request IDs:
	for _, request := range requests {
		request.ID = c.incRequestID()
	}

	// Serialize the request:
	requestBytes, err := json.Marshal(requests)
	if err != nil {
		return c.log.Errorf("Marshal failed %v: %w", requests, err)
	}

	// Make the call, obtain the serialized response:
	responseBytes, err := c.callRaw(requestBytes)
	if err != nil {
		return c.log.Errorf("Send failed %s: %w", string(requestBytes), err)
	}

	// Deserialize into an array of errors, to see if there's any:
	var maybeErrorResponses []ErrorResponse
	err = json.Unmarshal(responseBytes, &maybeErrorResponses)
	if err != nil {
		return c.log.Errorf("Unmarshal of potential error failed: %s %w", string(responseBytes), err)
	}

	// Walk the responses, returning the first error found:
	for _, maybeErrorResponse := range maybeErrorResponses {
		if maybeErrorResponse.Error != nil {
			return c.log.Errorf("Electrum error: %v", maybeErrorResponse.Error)
		}
	}

	// Deserialize the response:
	err = json.Unmarshal(responseBytes, response)
	if err != nil {
		return c.log.Errorf("Unmarshal failed %s: %w", string(responseBytes), err)
	}
	return nil
}
// callRaw sends a raw request in bytes, and returns a raw response (or an error).
func (c *Client) callRaw(request []byte) ([]byte, error) {
	c.log.Printf("Sending %s", string(request))

	if !c.IsConnected() {
		return nil, c.log.Errorf("Send failed %s: not connected", string(request))
	}

	// Electrum messages are newline-delimited; append the terminator.
	request = append(request, messageDelim)

	_, err := c.conn.Write(request)
	if err != nil {
		return nil, c.log.Errorf("Send failed %s: %w", string(request), err)
	}

	// NOTE(review): a fresh bufio.Reader per call discards whatever it
	// buffers past the first delimiter; safe only while the protocol is
	// strictly one response per request on this connection — confirm.
	reader := bufio.NewReader(c.conn)
	response, err := reader.ReadBytes(messageDelim)
	if err != nil {
		return nil, c.log.Errorf("Receive failed: %w", err)
	}

	c.log.Printf("Received %s", string(response))
	return response, nil
}
// incRequestID advances the request ID sequence and returns the fresh ID.
func (c *Client) incRequestID() int {
	next := c.nextRequestID + 1
	c.nextRequestID = next
	return next
}
// GetIndexHash returns the script parameter to use with Electrum, given a Bitcoin address.
// It is the SHA-256 of the script, byte-reversed and hex-encoded.
func GetIndexHash(script []byte) string {
	digest := sha256.Sum256(script)

	// Reverse the digest in place (Electrum expects reversed byte order).
	for i, j := 0, len(digest)-1; i < j; i, j = i+1, j-1 {
		digest[i], digest[j] = digest[j], digest[i]
	}

	return hex.EncodeToString(digest[:])
}
// reverse the order of the provided byte array, in place.
func reverse(a *[32]byte) {
	n := len(a)
	for i := 0; i < n/2; i++ {
		opp := n - 1 - i
		a[i], a[opp] = a[opp], a[i]
	}
}

28
electrum/pool.go Normal file
View File

@ -0,0 +1,28 @@
package electrum
// Pool provides a shared pool of Clients that callers can acquire and release, limiting
// the amount of concurrent Clients in active use.
type Pool struct {
	nextClient chan *Client // buffered channel acting as the free-client queue
}
// NewPool creates an initialized Pool with a `size` number of clients.
func NewPool(size int) *Pool {
	pool := &Pool{nextClient: make(chan *Client, size)}
	for i := 0; i < size; i++ {
		pool.nextClient <- NewClient()
	}
	return pool
}
// Acquire obtains an unused Client, blocking until one is released.
// Note that the channel itself is returned: receive from it to get a Client,
// and pass that Client back via Release when done.
func (p *Pool) Acquire() <-chan *Client {
	return p.nextClient
}
// Release returns a Client to the pool, unblocking the next caller trying to `Acquire()`.
// Only pass clients previously taken from this pool: releasing more clients
// than the pool's size fills the channel buffer and blocks.
func (p *Pool) Release(client *Client) {
	p.nextClient <- client
}

13
go.mod
View File

@ -1,16 +1,15 @@
module github.com/muun/recovery_tool
module github.com/muun/recovery
go 1.12
require (
github.com/btcsuite/btcd v0.21.0-beta
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect
github.com/btcsuite/btcutil v1.0.2
github.com/btcsuite/btcwallet v0.11.1-0.20200612012534-48addcd5591a
github.com/btcsuite/btcwallet/walletdb v1.3.3
github.com/lightninglabs/neutrino v0.11.1-0.20200316235139-bffc52e8f200
github.com/muun/libwallet v0.5.0
github.com/pkg/errors v0.9.1 // indirect
github.com/btcsuite/btcwallet v0.11.1-0.20200612012534-48addcd5591a // indirect
github.com/btcsuite/btcwallet/walletdb v1.3.3 // indirect
github.com/lightninglabs/neutrino v0.11.1-0.20200316235139-bffc52e8f200 // indirect
github.com/muun/libwallet v0.7.0
)
replace github.com/lightninglabs/neutrino => github.com/muun/neutrino v0.0.0-20190914162326-7082af0fa257

57
go.sum
View File

@ -20,7 +20,6 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/btcsuite/btcd v0.0.0-20190629003639-c26ffa870fd8/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI=
github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI=
github.com/btcsuite/btcd v0.20.0-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw=
github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btcd v0.20.1-beta.0.20200513120220-b470eee47728/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
@ -35,9 +34,6 @@ github.com/btcsuite/btcutil v1.0.2 h1:9iZ1Terx9fMIOtq1VrwdqfsATL9MC2l8ZrUY6YZ2ut
github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts=
github.com/btcsuite/btcutil/psbt v1.0.2 h1:gCVY3KxdoEVU7Q6TjusPO+GANIwVgr9yTLqM+a6CZr8=
github.com/btcsuite/btcutil/psbt v1.0.2/go.mod h1:LVveMu4VaNSkIRTZu2+ut0HDBRuYjqGocxDMNS1KuGQ=
github.com/btcsuite/btcwallet v0.10.0 h1:fFZncfYJ7VByePTGttzJc3qfCyDzU95ucZYk0M912lU=
github.com/btcsuite/btcwallet v0.10.0/go.mod h1:4TqBEuceheGNdeLNrelliLHJzmXauMM2vtWfuy1pFiM=
github.com/btcsuite/btcwallet v0.10.1-0.20191109031858-c49e7ef3ecf1/go.mod h1:4TqBEuceheGNdeLNrelliLHJzmXauMM2vtWfuy1pFiM=
github.com/btcsuite/btcwallet v0.11.1-0.20200612012534-48addcd5591a h1:AZ1Mf0gd9mgJqrTTIFUc17ep9EKUbQusVAIzJ6X+x3Q=
github.com/btcsuite/btcwallet v0.11.1-0.20200612012534-48addcd5591a/go.mod h1:9+AH3V5mcTtNXTKe+fe63fDLKGOwQbZqmvOVUef+JFE=
github.com/btcsuite/btcwallet/wallet/txauthor v1.0.0 h1:KGHMW5sd7yDdDMkCZ/JpP0KltolFsQcB973brBnfj4c=
@ -47,8 +43,6 @@ github.com/btcsuite/btcwallet/wallet/txrules v1.0.0/go.mod h1:UwQE78yCerZ313EXZw
github.com/btcsuite/btcwallet/wallet/txsizes v1.0.0 h1:6DxkcoMnCPY4E9cUDPB5tbuuf40SmmMkSQkoE8vCT+s=
github.com/btcsuite/btcwallet/wallet/txsizes v1.0.0/go.mod h1:pauEU8UuMFiThe5PB3EO+gO5kx87Me5NvdQDsTuq6cs=
github.com/btcsuite/btcwallet/walletdb v1.0.0/go.mod h1:bZTy9RyYZh9fLnSua+/CD48TJtYJSHjjYcSaszuxCCk=
github.com/btcsuite/btcwallet/walletdb v1.1.0 h1:JHAL7wZ8pX4SULabeAv/wPO9sseRWMGzE80lfVmRw6Y=
github.com/btcsuite/btcwallet/walletdb v1.1.0/go.mod h1:bZTy9RyYZh9fLnSua+/CD48TJtYJSHjjYcSaszuxCCk=
github.com/btcsuite/btcwallet/walletdb v1.3.1/go.mod h1:9cwc1Yyg4uvd4ZdfdoMnALji+V9gfWSMfxEdLdR5Vwc=
github.com/btcsuite/btcwallet/walletdb v1.3.2/go.mod h1:GZCMPNpUu5KE3ASoVd+k06p/1OW8OwNGCCaNWRto2cQ=
github.com/btcsuite/btcwallet/walletdb v1.3.3 h1:u6e7vRIKBF++cJy+hOHaMGg+88ZTwvpaY27AFvtB668=
@ -57,7 +51,6 @@ github.com/btcsuite/btcwallet/wtxmgr v1.0.0 h1:aIHgViEmZmZfe0tQQqF1xyd2qBqFWxX5v
github.com/btcsuite/btcwallet/wtxmgr v1.0.0/go.mod h1:vc4gBprll6BP0UJ+AIGDaySoc7MdAmZf8kelfNb8CFY=
github.com/btcsuite/btcwallet/wtxmgr v1.2.0 h1:ZUYPsSv8GjF9KK7lboB2OVHF0uYEcHxgrCfFWqPd9NA=
github.com/btcsuite/btcwallet/wtxmgr v1.2.0/go.mod h1:h8hkcKUE3X7lMPzTUoGnNiw5g7VhGrKEW3KpR2r0VnY=
github.com/btcsuite/fastsha256 v0.0.0-20160815193821-637e65642941/go.mod h1:QcFA8DZHtuIAdYKCq/BzELOaznRsCvwf4zTPmaYwaig=
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd h1:R/opQEbFEy9JGkIguV40SvRY1uliPX8ifOvi6ICsFCw=
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg=
github.com/btcsuite/golangcrypto v0.0.0-20150304025918-53f62d9b43e8/go.mod h1:tYvUd8KLhm/oXvUeSEs2VlLghFjQt9+ZaF9ghH0JNjc=
@ -83,12 +76,15 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/decred/dcrd/lru v1.0.0 h1:Kbsb1SFDsIlaupWPwsPp+dkxiBY1frcS07PCPgotKz8=
github.com/decred/dcrd/lru v1.0.0/go.mod h1:mxKOwFd7lFjN2GZYsiz/ecgqR6kkYAl+0pz0tEMk218=
github.com/denisenkom/go-mssqldb v0.0.0-20181014144952-4e0d7dc8888f/go.mod h1:xN/JuLBIz4bjkxNmByTiV1IbhfnYb6oo99phBn4Eqhc=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
github.com/frankban/quicktest v1.2.2/go.mod h1:Qh/WofXFeiAFII1aEBu529AtJo6Zg2VHscnEsbBnJ20=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
@ -99,10 +95,12 @@ github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V
github.com/go-openapi/errors v0.19.2/go.mod h1:qX0BLWsyaKfvhluLejVpVNwNRdXZhEbTA4kxxpKBC94=
github.com/go-openapi/strfmt v0.19.5/go.mod h1:eftuHTlB/dI8Uq8JJOyRlieZf+WkkxUuk0dgdHXr2Qk=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
@ -124,12 +122,18 @@ github.com/google/go-cmp v0.2.1-0.20190312032427-6f77996f0c42/go.mod h1:8QqcDgzr
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0 h1:/QaMHBdZ26BB3SSst0Iwl10Epc+xhTquomWX0oZEB6w=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v0.0.0-20170724004829-f2862b476edc/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.8.6/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/hhrutter/lzw v0.0.0-20190827003112-58b82c5a41cc/go.mod h1:yJBvOcu1wLQ9q9XZmfiPfur+3dQJuIhYQsMGLYcItZk=
github.com/hhrutter/lzw v0.0.0-20190829144645-6f07a24e8650 h1:1yY/RQWNSBjJe2GDCIYoLmpWVidrooriUr4QS/zaATQ=
github.com/hhrutter/lzw v0.0.0-20190829144645-6f07a24e8650/go.mod h1:yJBvOcu1wLQ9q9XZmfiPfur+3dQJuIhYQsMGLYcItZk=
github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7 h1:o1wMw7uTNyA58IlEdDpxIrtFHTgnvYzA8sCQz8luv94=
github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7/go.mod h1:WkUxfS2JUu3qPo6tRld7ISb8HiC0gVSU91kooBMDVok=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/jackpal/gateway v1.0.5/go.mod h1:lTpwd4ACLXmpyiCTRtfiNyVnUmqT9RivzCDQetPfnjA=
github.com/jackpal/go-nat-pmp v0.0.0-20170405195558-28a68d0c24ad/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc=
@ -144,7 +148,9 @@ github.com/jinzhu/inflection v0.0.0-20180308033659-04140366298a/go.mod h1:h+uFLl
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v0.0.0-20181116074157-8ec929ed50c3/go.mod h1:oHTiXerJ20+SfYcrdlBO7rzZRJWGwSTQ0iUY2jI6Gfc=
github.com/jinzhu/now v1.0.1 h1:HjfetcXq097iXP0uoPCdnM4Efp5/9MsM0/M+XOTeR3M=
github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jrick/logrotate v1.0.0 h1:lQ1bL/n9mBNeIXoTUoYRlK4dHuNJVofX9oWqBtPnSzI=
github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ=
@ -162,21 +168,19 @@ github.com/kkdai/bstream v0.0.0-20181106074824-b3251f7901ec h1:n1NeQ3SgUHyISrjFF
github.com/kkdai/bstream v0.0.0-20181106074824-b3251f7901ec/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.1 h1:sJZmqHoEaY7f+NPP8pgLB/WxulyR3fewgCM2qaSlBb4=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lightninglabs/gozmq v0.0.0-20190710231225-cea2a031735d h1:tt8hwvxl6fksSfchjBGaWu+pnWJQfG1OWiCM20qOSAE=
github.com/lightninglabs/gozmq v0.0.0-20190710231225-cea2a031735d/go.mod h1:vxmQPeIQxPf6Jf9rM8R+B4rKBqLA2AjttNxkFBL2Plk=
github.com/lightninglabs/gozmq v0.0.0-20191113021534-d20a764486bf h1:HZKvJUHlcXI/f/O0Avg7t8sqkPo78HFzjmeYFl6DPnc=
github.com/lightninglabs/gozmq v0.0.0-20191113021534-d20a764486bf/go.mod h1:vxmQPeIQxPf6Jf9rM8R+B4rKBqLA2AjttNxkFBL2Plk=
github.com/lightninglabs/protobuf-hex-display v1.3.3-0.20191212020323-b444784ce75d/go.mod h1:KDb67YMzoh4eudnzClmvs2FbiLG9vxISmLApUkCa4uI=
github.com/lightningnetwork/lightning-onion v0.0.0-20190909101754-850081b08b6a/go.mod h1:rigfi6Af/KqsF7Za0hOgcyq2PNH4AN70AaMRxcJkff4=
github.com/lightningnetwork/lightning-onion v1.0.1 h1:qChGgS5+aPxFeR6JiUsGvanei1bn6WJpYbvosw/1604=
github.com/lightningnetwork/lightning-onion v1.0.1/go.mod h1:rigfi6Af/KqsF7Za0hOgcyq2PNH4AN70AaMRxcJkff4=
github.com/lightningnetwork/lnd v0.8.0-beta h1:HmmhSRTq48qobqQF8YLqNa8eKU8dDBNbWWpr2VzycJM=
github.com/lightningnetwork/lnd v0.8.0-beta/go.mod h1:nq06y2BDv7vwWeMmwgB7P3pT7/Uj7sGf5FzHISVD6t4=
github.com/lightningnetwork/lnd v0.10.4-beta h1:Af2zOCPePeaU8Tkl8IqtTjr4BP3zYfi+hAtQYcCMM58=
github.com/lightningnetwork/lnd v0.10.4-beta/go.mod h1:4d02pduRVtZwgTJ+EimKJTsEAY0jDwi0SPE9h5aRneM=
github.com/lightningnetwork/lnd/cert v1.0.2/go.mod h1:fmtemlSMf5t4hsQmcprSoOykypAPp+9c+0d0iqTScMo=
@ -201,21 +205,25 @@ github.com/miekg/dns v0.0.0-20171125082028-79bfde677fa8/go.mod h1:W1PPwlIAgtquWB
github.com/miekg/dns v1.1.29 h1:xHBEhR+t5RzcFJjBLJlax2daXOrTYtr9z4WdKEfWFzg=
github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/muun/libwallet v0.5.0 h1:3YcUuQsnViXdrXntBwV3sLH2RKHC5uNODhuawp+2dg8=
github.com/muun/libwallet v0.5.0/go.mod h1:EdLg8d1sGJ4q4VUKRJyfNDBnbWc+rs5b8pHHu6KF5LY=
github.com/muun/libwallet v0.7.0 h1:FfPt+L7WN02qIgG9oJgVc9wBs7fw9w6PgHOsEI56o60=
github.com/muun/libwallet v0.7.0/go.mod h1:CB5ooFhTjbewO1YlP74Hnlf1PHWZhTU58g7LU3c2+fw=
github.com/muun/neutrino v0.0.0-20190914162326-7082af0fa257 h1:NW17wq2gZlEFeW3/Zx3wSmqlD0wKGf7YvhpP+CNCsbE=
github.com/muun/neutrino v0.0.0-20190914162326-7082af0fa257/go.mod h1:awTrhbCWjWNH4yVwZ4IE7nZbvpQ27e7OyD+jao7wRxA=
github.com/muun/recovery v0.3.0 h1:YyCXcuGx+SluVa0bHsyaXiowB67rdpJ6AudKv8QGvEE=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/pdfcpu/pdfcpu v0.3.8 h1:wdKii186dzmr/aP/fkJl2s9yT3TZcwc1VqgfabNymGI=
github.com/pdfcpu/pdfcpu v0.3.8/go.mod h1:EfJ1EIo3n5+YlGF53DGe1yF1wQLiqK1eqGDN5LuKALs=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
@ -236,6 +244,7 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/tv42/zbase32 v0.0.0-20160707012821-501572607d02/go.mod h1:tHlrkM198S068ZqfrO6S8HsoJq2bF3ETfTL+kt4tInY=
@ -251,8 +260,6 @@ golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnf
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7 h1:0hQKqeLdqlt5iIwVOBErRisrHJAN57yOiPRQItI20fU=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
@ -262,6 +269,9 @@ golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20190823064033-3a9bac650e44/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5 h1:QelT11PB4FXiDEXucrfNckHoFxwt8USGY1ajP1ZF5lM=
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
@ -294,6 +304,7 @@ golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -311,6 +322,7 @@ golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2 h1:z99zHgr7hKfrUcX/KsoJk5FJfjTceCKIp96+biqP4To=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@ -324,6 +336,7 @@ golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapK
golang.org/x/tools v0.0.0-20200117012304-6edc0a871e69/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@ -349,8 +362,10 @@ google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v1 v1.0.1/go.mod h1:3NjfXwocQRYAPTq4/fzX+CwUhPRcR/azYRhj8G+LqMo=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gormigrate.v1 v1.6.0 h1:XpYM6RHQPmzwY7Uyu+t+xxMXc86JYFJn4nEc9HzQjsI=
gopkg.in/gormigrate.v1 v1.6.0/go.mod h1:Lf00lQrHqfSYWiTtPcyQabsDdM6ejZaMgV0OU6JMSlw=
@ -358,10 +373,14 @@ gopkg.in/macaroon-bakery.v2 v2.0.1/go.mod h1:B4/T17l+ZWGwxFSZQmlBwp25x+og7OkhETf
gopkg.in/macaroon.v2 v2.0.0/go.mod h1:+I6LnTMkm/uV5ew/0nsulNjL16SK4+C8yDmRUzHR17I=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

View File

@ -1,31 +1,44 @@
package main
import (
"encoding/hex"
log "log"
"github.com/btcsuite/btcutil/base58"
"github.com/muun/libwallet"
)
func buildExtendedKey(rawKey, recoveryCode string) *libwallet.DecryptedPrivateKey {
salt := extractSalt(rawKey)
var defaultNetwork = libwallet.Mainnet()
func buildExtendedKeys(rawKey1, rawKey2, recoveryCode string) (
*libwallet.DecryptedPrivateKey,
*libwallet.DecryptedPrivateKey) {
// Always take the salt from the second key (the same salt was used, but our older key format
// is missing the salt on the first key):
salt := extractSalt(rawKey2)
decryptionKey, err := libwallet.RecoveryCodeToKey(recoveryCode, salt)
if err != nil {
log.Fatalf("failed to process recovery code: %v", err)
}
walletKey, err := decryptionKey.DecryptKey(rawKey, libwallet.Mainnet())
key1, err := decryptionKey.DecryptKey(rawKey1, defaultNetwork)
if err != nil {
log.Fatalf("failed to decrypt key: %v", err)
log.Fatalf("failed to decrypt first key: %v", err)
}
return walletKey
key2, err := decryptionKey.DecryptKey(rawKey2, defaultNetwork)
if err != nil {
log.Fatalf("failed to decrypt second key: %v", err)
}
return key1, key2
}
func extractSalt(rawKey string) string {
bytes := base58.Decode(rawKey)
saltBytes := bytes[len(bytes)-8:]
return string(saltBytes)
return hex.EncodeToString(saltBytes)
}

50
main.go
View File

@ -8,39 +8,36 @@ import (
"strings"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet"
)
func main() {
chainService, close, _ := startChainService()
defer close()
printWelcomeMessage()
recoveryCode := readRecoveryCode()
userRawKey := readKey("first encrypted private key", 147)
userKey := buildExtendedKey(userRawKey, recoveryCode)
userKey.Key.Path = "m/1'/1'"
userRawKey := readKey("first encrypted private key")
muunRawKey := readKey("second encrypted private key")
muunRawKey := readKey("second encrypted private key", 147)
muunKey := buildExtendedKey(muunRawKey, recoveryCode)
userKey, muunKey := buildExtendedKeys(userRawKey, muunRawKey, recoveryCode)
userKey.Key.Path = "m/1'/1'"
sweepAddress := readSweepAddress()
fmt.Println("")
fmt.Println("Preparing to scan the blockchain from your wallet creation block")
fmt.Println("Note that only confirmed transactions can be detected")
fmt.Println("\nThis may take a while")
fmt.Println("\nStarting scan of all your addresses. This may take a while")
sweeper := Sweeper{
ChainService: chainService,
UserKey: userKey.Key,
MuunKey: muunKey.Key,
Birthday: muunKey.Birthday,
SweepAddress: sweepAddress,
}
utxos := sweeper.GetUTXOs()
utxos, err := sweeper.GetUTXOs()
if err != nil {
exitWithError(err)
}
fmt.Println("")
@ -131,21 +128,29 @@ func readRecoveryCode() string {
return finalRC
}
func readKey(keyType string, characters int) string {
func readKey(keyType string) string {
fmt.Println("")
fmt.Printf("Enter your %v", keyType)
fmt.Println()
fmt.Println("(it looks like this: '9xzpc7y6sNtRvh8Fh...')")
fmt.Print("> ")
userInput := scanMultiline(characters)
// NOTE:
// Users will most likely copy and paste their keys from the Emergency Kit PDF. In this case,
// input will come suddenly in multiple lines, so a simple scan & retry (let's say 3 lines
// were pasted) will attempt to parse a key and fail 2 times in a row, with leftover characters
// until the user presses enter to fail for a 3rd time.
if len(userInput) != characters {
fmt.Printf("Your %v must have %v characters", keyType, characters)
fmt.Println("")
fmt.Println("Please, try again")
// Given the line lengths actually found in our Emergency Kits, we have a simple solution for now:
// scan a minimum length of characters. Pasting from current versions of the Emergency Kit will
// only go past a minimum length when the key being entered is complete, in all cases.
userInput := scanMultiline(libwallet.EncodedKeyLengthLegacy)
return readKey(keyType, characters)
if len(userInput) < libwallet.EncodedKeyLengthLegacy {
// This is obviously invalid. Other problems will be detected later on, during the actual
// decoding and decryption stage.
fmt.Println("The key you entered doesn't look valid\nPlease, try again")
return readKey(keyType)
}
return userInput
@ -239,3 +244,8 @@ func scanMultiline(minChars int) string {
return result.String()
}
// exitWithError aborts the program after printing a generic failure message.
// NOTE(review): `reason` is accepted but never shown or logged, so the actual
// cause is lost — consider surfacing it (at least in debug mode) for diagnostics.
func exitWithError(reason error) {
fmt.Println("\nError while scanning. Can't continue. Please, try again later.")
os.Exit(1)
}

193
scanner/scanner.go Normal file
View File

@ -0,0 +1,193 @@
package scanner
import (
"sync"
"time"
"github.com/muun/libwallet"
"github.com/muun/recovery/electrum"
"github.com/muun/recovery/utils"
)
const electrumPoolSize = 3
const taskTimeout = 2 * time.Minute
const batchSize = 100
// Scanner finds unspent outputs and their transactions when given a map of addresses.
//
// It implements multi-server support, batching feature detection and use, concurrency control,
// timeouts and cancelations, and provides a channel-based interface.
//
// Servers are provided by a ServerProvider instance, and rotated when unreachable or faulty. We
// trust ServerProvider to prioritize good targets.
//
// Batching is leveraged when supported by a particular server, falling back to sequential requests
// for single addresses (which is much slower, but can get us out of trouble when better servers are
// not available).
//
// Timeouts and cancellations are an internal affair, not configurable by callers. See taskTimeout
// declared above.
//
// Concurrency control works by using an electrum.Pool, limiting access to clients, and not an
// internal worker pool. This is the Go way (limiting access to resources rather than having a fixed
// number of parallel goroutines), and (more to the point) semantically correct. We don't care
// about the number of concurrent workers, what we want to avoid is too many connections to
// Electrum servers.
type Scanner struct {
pool *electrum.Pool // limits concurrent Electrum connections (see electrumPoolSize)
servers *ServerProvider // rotating list of candidate Electrum servers
log *utils.Logger // debug-only logger, tagged "Scanner"
}
// Utxo references a transaction output, plus the associated MuunAddress and script.
type Utxo struct {
TxID string // ID of the transaction containing this output
OutputIndex int // position of the output within that transaction
Amount int // value in satoshis (converted as such by the sweeper's buildRelevantTxs)
Address libwallet.MuunAddress // the scanned address this output pays to
Script []byte // the output script derived for Address (see getOutputScripts)
}
// scanContext contains the synchronization objects for a single Scanner round, to manage Tasks.
type scanContext struct {
addresses chan libwallet.MuunAddress // input: the address space to scan
results chan Utxo // unspent outputs found by tasks, consumed by Scan
errors chan error // the first task failure aborts the round
done chan struct{} // closed to signal completion or cancellation
wg *sync.WaitGroup // tracks in-flight scanBatch goroutines
}
// NewScanner creates an initialized Scanner, ready to run scan rounds.
func NewScanner() *Scanner {
	scanner := new(Scanner)
	scanner.pool = electrum.NewPool(electrumPoolSize)
	scanner.servers = NewServerProvider()
	scanner.log = utils.NewLogger("Scanner")
	return scanner
}
// Scan an address space and return all relevant transactions for a sweep.
//
// The addresses channel is consumed until closed. Results accumulate until the
// background scan signals completion via ctx.done, or until the first error,
// which aborts the whole round.
func (s *Scanner) Scan(addresses chan libwallet.MuunAddress) ([]Utxo, error) {
var results []Utxo
var waitGroup sync.WaitGroup
// Create the Context that goroutines will share:
ctx := &scanContext{
addresses: addresses,
results: make(chan Utxo),
errors: make(chan error),
done: make(chan struct{}),
wg: &waitGroup,
}
// Start the scan in background:
go s.startScan(ctx)
// Collect all results until the done signal, or abort on the first error:
for {
select {
case err := <-ctx.errors:
// NOTE(review): startScan also closes ctx.done once its WaitGroup drains; if
// an error arrives just as the scan completes, both sides may close the same
// channel — confirm this cannot cause a double-close panic.
close(ctx.done) // send the done signal ourselves
return nil, err
case result := <-ctx.results:
results = append(results, result)
case <-ctx.done:
return results, nil
}
}
}
// startScan drives a scan round: it pulls address batches, pairs each batch with
// a pooled Electrum client, and fans the work out to scanBatch goroutines. When
// every batch has been processed it closes ctx.done to signal completion.
func (s *Scanner) startScan(ctx *scanContext) {
	s.log.Printf("Scan started")

	batches := streamBatches(ctx.addresses)

	for batch := range batches {
		// Block until a client becomes available, or the scan is canceled.
		// BUG FIX: `client` was previously declared once outside the loop and
		// captured by reference in the goroutine closure below, so the next
		// iteration's reassignment raced with the running goroutine's reads
		// (two goroutines could end up sharing — and double-releasing — the
		// same client). It's now scoped per iteration and passed explicitly.
		var client *electrum.Client

		select {
		case <-ctx.done:
			return
		case client = <-s.pool.Acquire():
		}

		// Start scanning this batch in background:
		ctx.wg.Add(1)

		go func(batch []libwallet.MuunAddress, client *electrum.Client) {
			defer s.pool.Release(client)
			defer ctx.wg.Done()

			s.scanBatch(ctx, client, batch)
		}(batch, client)
	}

	// Wait for all tasks that are still executing to complete:
	ctx.wg.Wait()
	s.log.Printf("Scan complete")

	// Signal to the Scanner that this Context has no more pending work:
	close(ctx.done)
}
// scanBatch runs one scanTask for a batch of addresses using the given client,
// reporting results or the error back through the scanContext channels.
//
// NOTE:
// We build the task passing our selected Client. Since we're choosing the instance,
// it's our job to control acquisition and release of Clients to prevent sharing
// (clients are single-user). The task won't enforce this safety measure (it can't),
// it's fully up to us.
func (s *Scanner) scanBatch(ctx *scanContext, client *electrum.Client, batch []libwallet.MuunAddress) {
	task := &scanTask{
		servers:   s.servers,
		client:    client,
		addresses: batch,
		timeout:   taskTimeout,
		exit:      ctx.done,
	}

	// Do the thing:
	addressResults, err := task.Execute()

	if err != nil {
		// BUG FIX: sends were previously unconditional. Once Scan returns (after
		// the first error) nobody reads these channels, so an unconditional send
		// blocked forever — leaking this goroutine and deadlocking wg.Wait in
		// startScan. Select on ctx.done so a canceled round lets us exit.
		select {
		case ctx.errors <- s.log.Errorf("Scan failed: %w", err):
		case <-ctx.done:
		}
		return
	}

	// Send back all results, bailing out if the round was canceled meanwhile:
	for _, result := range addressResults {
		select {
		case ctx.results <- result:
		case <-ctx.done:
			return
		}
	}
}
// streamBatches groups the incoming address stream into slices of (at most)
// batchSize, sending each completed batch through the returned channel. The
// channel is closed once the input channel closes and any partial batch is sent.
func streamBatches(addresses chan libwallet.MuunAddress) chan []libwallet.MuunAddress {
	batches := make(chan []libwallet.MuunAddress)

	go func() {
		defer close(batches)

		// Pre-size each batch to its known maximum, avoiding repeated growth:
		nextBatch := make([]libwallet.MuunAddress, 0, batchSize)

		for address := range addresses {
			// Add items to the batch until we reach the limit:
			nextBatch = append(nextBatch, address)
			if len(nextBatch) < batchSize {
				continue
			}

			// Send back the batch and start over (a fresh slice — the previous
			// backing array is now owned by the receiver):
			batches <- nextBatch
			nextBatch = make([]libwallet.MuunAddress, 0, batchSize)
		}

		// Send back an incomplete batch with any remaining addresses:
		if len(nextBatch) > 0 {
			batches <- nextBatch
		}
	}()

	return batches
}

97
scanner/servers.go Normal file
View File

@ -0,0 +1,97 @@
package scanner
import "sync/atomic"
// ServerProvider hands out Electrum server addresses from a rotating list,
// wrapping around when the list is exhausted.
type ServerProvider struct {
	nextIndex int32 // last index handed out; advanced atomically
}

// NewServerProvider returns an initialized ServerProvider.
func NewServerProvider() *ServerProvider {
	return &ServerProvider{nextIndex: -1}
}

// NextServer returns an address from the rotating list. It's thread-safe.
func (p *ServerProvider) NextServer() string {
	servers := PublicElectrumServers
	position := int(atomic.AddInt32(&p.nextIndex, 1)) % len(servers)
	return servers[position]
}

// PublicElectrumServers list.
//
// This list was taken from the `electrum` repository, keeping TLS servers and excluding onion URIs.
// It was then sorted into sections using the `cmd/survey` program, to prioritize the more reliable
// servers with batch support.
//
// See https://github.com/spesmilo/electrum/blob/master/electrum/servers.json
// See `cmd/survey/main.go`
//
var PublicElectrumServers = []string{
	// With batch support:
	"electrum.hsmiths.com:50002",
	"E-X.not.fyi:50002",
	"VPS.hsmiths.com:50002",
	"btc.cihar.com:50002",
	"e.keff.org:50002",
	"electrum.qtornado.com:50002",
	"electrum.emzy.de:50002",
	"tardis.bauerj.eu:50002",
	"electrum.hodlister.co:50002",
	"electrum3.hodlister.co:50002",
	"electrum5.hodlister.co:50002",
	"fortress.qtornado.com:443",
	"electrumx.erbium.eu:50002",
	"bitcoin.lukechilds.co:50002",
	"electrum.bitkoins.nl:50512",

	// Without batch support:
	"electrum.aantonop.com:50002",
	"electrum.blockstream.info:50002",
	"blockstream.info:700",

	// Unclassified:
	"81-7-10-251.blue.kundencontroller.de:50002",
	"b.ooze.cc:50002",
	"bitcoin.corgi.party:50002",
	"bitcoins.sk:50002",
	"btc.xskyx.net:50002",
	"electrum.jochen-hoenicke.de:50005",
	"dragon085.startdedicated.de:50002",
	"e-1.claudioboxx.com:50002",
	"electrum-server.ninja:50002",
	"electrum-unlimited.criptolayer.net:50002",
	"electrum.eff.ro:50002",
	"electrum.festivaldelhumor.org:50002",
	"electrum.leblancnet.us:50002",
	"electrum.mindspot.org:50002",
	"electrum.taborsky.cz:50002",
	"electrum.villocq.com:50002",
	"electrum2.eff.ro:50002",
	"electrum2.villocq.com:50002",
	"electrumx.bot.nu:50002",
	"electrumx.ddns.net:50002",
	"electrumx.ftp.sh:50002",
	"electrumx.soon.it:50002",
	"elx01.knas.systems:50002",
	"fedaykin.goip.de:50002",
	"fn.48.org:50002",
	"ndnd.selfhost.eu:50002",
	"orannis.com:50002",
	"rbx.curalle.ovh:50002",
	"technetium.network:50002",
	"tomscryptos.com:50002",
	"ulrichard.ch:50002",
	"vmd27610.contaboserver.net:50002",
	"vmd30612.contaboserver.net:50002",
	"xray587.startdedicated.de:50002",
	"yuio.top:50002",
	"bitcoin.dragon.zone:50004",
	"ecdsa.net:110",
	"btc.usebsv.com:50006",
	"e2.keff.org:50002",
	"electrumx.electricnewyear.net:50002",
	"green-gold.westeurope.cloudapp.azure.com:56002",
	"electrumx-core.1209k.com:50002",
	"bitcoin.aranguren.org:50002",
}

180
scanner/task.go Normal file
View File

@ -0,0 +1,180 @@
package scanner
import (
"fmt"
"time"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet"
"github.com/muun/recovery/electrum"
)
// scanTask encapsulates a parallelizable Scanner unit of work.
type scanTask struct {
servers *ServerProvider // rotating server list, used to (re)connect the client
client *electrum.Client // single-user client; acquisition/release is managed by the caller
addresses []libwallet.MuunAddress // the batch of addresses to query
timeout time.Duration // overall deadline for Execute, across all retries
exit chan struct{} // closed by the owner to cancel the task
}
// Execute obtains the Utxo set for the Task address, implementing a retry strategy.
//
// Each attempt runs in its own goroutine, reporting back through channels. The
// channels are buffered (capacity 1) so an attempt that loses the race against
// the timeout or the exit signal can still complete its send and terminate.
// With unbuffered channels, such an attempt blocked forever on the send,
// leaking the goroutine (and its connection). At most one attempt is ever
// outstanding — a new one starts only after the previous reported an error —
// so capacity 1 suffices and no stale value can be observed.
func (t *scanTask) Execute() ([]Utxo, error) {
	results := make(chan []Utxo, 1)
	errors := make(chan error, 1)
	timeout := time.After(t.timeout)

	// Keep the last error around, in case we reach the timeout and want to know the reason:
	var lastError error

	for {
		// Attempt to run the task:
		go t.tryExecuteAsync(results, errors)

		// Wait until a result is sent, the timeout is reached or the task is
		// canceled, capturing errors along the way:
		select {
		case <-t.exit:
			return []Utxo{}, nil // stop retrying when we get the done signal

		case result := <-results:
			return result, nil

		case err := <-errors:
			lastError = err

		case <-timeout:
			return nil, fmt.Errorf("Task timed out. Last error: %w", lastError)
		}
	}
}
// tryExecuteAsync runs a single attempt and reports its outcome through the
// given channels.
//
// Electrum server failures are extremely common (unreachable IPs, dropped
// connections, sudden EOFs, etc.), so on any error we assume the server is at
// fault: disconnect the client so the next attempt cycles to a different one,
// then hand the error back for the retry loop to record.
func (t *scanTask) tryExecuteAsync(results chan []Utxo, errors chan error) {
	utxos, err := t.tryExecute()
	if err == nil {
		results <- utxos
		return
	}

	t.client.Disconnect()
	errors <- err
}
// tryExecute makes a single scan attempt: connect if necessary, derive the
// output scripts and Electrum index hashes for every address in the batch,
// list their unspent outputs (batched when the server supports it), and
// assemble the results into Utxo values.
func (t *scanTask) tryExecute() ([]Utxo, error) {
// If our client is not connected, make an attempt to connect to a server:
if !t.client.IsConnected() {
err := t.client.Connect(t.servers.NextServer())
if err != nil {
return nil, err
}
}
// Prepare the output scripts for all given addresses:
outputScripts, err := getOutputScripts(t.addresses)
if err != nil {
return nil, err
}
// Prepare the index hashes that Electrum requires to list outputs:
indexHashes, err := getIndexHashes(outputScripts)
if err != nil {
return nil, err
}
// Call Electrum to get the unspent output list, grouped by index for each address:
var unspentRefGroups [][]electrum.UnspentRef
if t.client.SupportsBatching() {
unspentRefGroups, err = t.listUnspentWithBatching(indexHashes)
} else {
unspentRefGroups, err = t.listUnspentWithoutBatching(indexHashes)
}
if err != nil {
return nil, err
}
// Compile the results into a list of `Utxos`:
// NOTE(review): the mapping below assumes group i corresponds to indexHashes[i]
// (and thus addresses[i]). That holds by construction in the non-batching path;
// confirm ListUnspentBatch preserves input order as well.
var utxos []Utxo
for i, unspentRefGroup := range unspentRefGroups {
for _, unspentRef := range unspentRefGroup {
newUtxo := Utxo{
TxID: unspentRef.TxHash,
OutputIndex: unspentRef.TxPos,
Amount: unspentRef.Value,
Script: outputScripts[i],
Address: t.addresses[i],
}
utxos = append(utxos, newUtxo)
}
}
return utxos, nil
}
// listUnspentWithBatching lists the unspent outputs for all index hashes using
// a single batched Electrum request.
func (t *scanTask) listUnspentWithBatching(indexHashes []string) ([][]electrum.UnspentRef, error) {
	groups, err := t.client.ListUnspentBatch(indexHashes)
	if err != nil {
		return nil, fmt.Errorf("Listing with batching failed: %w", err)
	}

	return groups, nil
}
// listUnspentWithoutBatching lists the unspent outputs for each index hash with
// one sequential request per hash, for servers that don't support batching.
// This is much slower, but works everywhere.
func (t *scanTask) listUnspentWithoutBatching(indexHashes []string) ([][]electrum.UnspentRef, error) {
	// Pre-size: exactly one group per index hash, in the same order.
	unspentRefGroups := make([][]electrum.UnspentRef, 0, len(indexHashes))

	for _, indexHash := range indexHashes {
		newGroup, err := t.client.ListUnspent(indexHash)
		if err != nil {
			return nil, fmt.Errorf("Listing without batching failed: %w", err)
		}

		unspentRefGroups = append(unspentRefGroups, newGroup)
	}

	return unspentRefGroups, nil
}
// getIndexHashes calculates the Electrum index hash for each output script,
// preserving order.
func getIndexHashes(outputScripts [][]byte) ([]string, error) {
	hashes := make([]string, 0, len(outputScripts))

	for _, script := range outputScripts {
		hashes = append(hashes, electrum.GetIndexHash(script))
	}

	return hashes, nil
}
// getOutputScripts creates the pay-to-address output script for each of the
// given Bitcoin addresses, preserving order.
func getOutputScripts(addresses []libwallet.MuunAddress) ([][]byte, error) {
	scripts := make([][]byte, 0, len(addresses))

	for _, address := range addresses {
		raw := address.Address()

		decoded, err := btcutil.DecodeAddress(raw, &chaincfg.MainNetParams)
		if err != nil {
			return nil, fmt.Errorf("Failed to decode address %s: %w", raw, err)
		}

		script, err := txscript.PayToAddrScript(decoded)
		if err != nil {
			return nil, fmt.Errorf("Failed to craft script for %s: %w", raw, err)
		}

		scripts = append(scripts, script)
	}

	return scripts, nil
}

View File

@ -1,30 +1,78 @@
package main
import (
"bytes"
"encoding/hex"
"fmt"
"github.com/muun/recovery/electrum"
"github.com/muun/recovery/scanner"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/chaincfg/chainhash"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/lightninglabs/neutrino"
"github.com/muun/libwallet"
)
var (
chainParams = chaincfg.MainNetParams
)
type Sweeper struct {
ChainService *neutrino.ChainService
UserKey *libwallet.HDPrivateKey
MuunKey *libwallet.HDPrivateKey
Birthday int
SweepAddress btcutil.Address
}
func (s *Sweeper) GetUTXOs() []*RelevantTx {
g := NewAddressGenerator(s.UserKey, s.MuunKey)
g.Generate()
// RelevantTx pairs a transaction output with everything needed to sweep it:
// the PkScript and address it pays to, its amount in satoshis, the signing
// details for its derivation path, and whether it has been spent.
type RelevantTx struct {
PkScript []byte
Address string
Spent bool
Satoshis int64
SigningDetails signingDetails
Outpoint wire.OutPoint
}
birthday := s.Birthday
if birthday == 0xFFFF {
birthday = 0
// String implements fmt.Stringer, rendering the outpoint, amount and derivation path.
func (tx *RelevantTx) String() string {
return fmt.Sprintf("outpoint %v:%v for %v sats on path %v",
tx.Outpoint.Hash, tx.Outpoint.Index, tx.Satoshis, tx.SigningDetails.Address.DerivationPath())
}
func (s *Sweeper) GetUTXOs() ([]*RelevantTx, error) {
addresses := s.generateAddresses()
results, err := scanner.NewScanner().Scan(addresses)
if err != nil {
return nil, fmt.Errorf("error while scanning addresses: %w", err)
}
return startRescan(s.ChainService, g.Addresses(), birthday)
txs, err := buildRelevantTxs(results)
if err != nil {
return nil, fmt.Errorf("error while crafting transaction: %w", err)
}
return txs, nil
}
// generateAddresses derives the wallet's address space from the user and Muun
// keys in a background goroutine, streaming each address through the returned
// channel, which is closed once generation completes.
func (s *Sweeper) generateAddresses() chan libwallet.MuunAddress {
ch := make(chan libwallet.MuunAddress)
go func() {
// Generate the full address set up-front, then stream it out:
g := NewAddressGenerator(s.UserKey, s.MuunKey)
g.Generate()
for _, details := range g.Addresses() {
ch <- details.Address
}
close(ch)
}()
return ch
}
func (s *Sweeper) GetSweepTxAmountAndWeightInBytes(utxos []*RelevantTx) (outputAmount int64, weightInBytes int64, err error) {
@ -50,5 +98,59 @@ func (s *Sweeper) BuildSweepTx(utxos []*RelevantTx, fee int64) (*wire.MsgTx, err
}
func (s *Sweeper) BroadcastTx(tx *wire.MsgTx) error {
return s.ChainService.SendTransaction(tx)
// Connect to an Electrum server using a fresh client and provider pair:
sp := scanner.NewServerProvider() // TODO create servers module, for provider and pool
client := electrum.NewClient()
for !client.IsConnected() {
client.Connect(sp.NextServer())
}
// Encode the transaction for broadcast:
txBytes := new(bytes.Buffer)
err := tx.BtcEncode(txBytes, wire.ProtocolVersion, wire.WitnessEncoding)
if err != nil {
return fmt.Errorf("error while encoding tx: %w", err)
}
txHex := hex.EncodeToString(txBytes.Bytes())
// Do the thing!
_, err = client.Broadcast(txHex)
if err != nil {
return fmt.Errorf("error while broadcasting: %w", err)
}
return nil
}
// buildRelevantTxs prepares the output from Scanner for crafting, converting
// each scanned Utxo into the RelevantTx form the sweep builder consumes.
// Returns an error if any TxID fails to parse into a chain hash.
func buildRelevantTxs(utxos []scanner.Utxo) ([]*RelevantTx, error) {
	// Pre-size: exactly one RelevantTx per scanned UTXO.
	relevantTxs := make([]*RelevantTx, 0, len(utxos))

	for _, utxo := range utxos {
		address := utxo.Address.Address()

		// Parse the transaction ID string into a chain hash:
		chainHash, err := chainhash.NewHashFromStr(utxo.TxID)
		if err != nil {
			return nil, err
		}

		relevantTx := &RelevantTx{
			PkScript:       utxo.Script,
			Address:        address,
			Spent:          false,
			Satoshis:       int64(utxo.Amount),
			SigningDetails: signingDetails{utxo.Address},
			Outpoint: wire.OutPoint{
				Hash:  *chainHash,
				Index: uint32(utxo.OutputIndex),
			},
		}

		relevantTxs = append(relevantTxs, relevantTx)
	}

	return relevantTxs, nil
}

53
utils/logger.go Normal file
View File

@ -0,0 +1,53 @@
package utils
import (
"fmt"
"os"
"strings"
)
// DebugMode is true when the `DEBUG` environment variable is set to "true".
var DebugMode = os.Getenv("DEBUG") == "true"

// Logger provides logging methods that only print when `DebugMode` is true.
// This allows callers to log detailed information without displaying it to users during normal
// execution.
type Logger struct {
	tag string // included as a bracketed prefix in every line
}

// NewLogger returns an initialized Logger instance.
func NewLogger(tag string) *Logger {
	return &Logger{tag: tag}
}

// SetTag updates the tag of this Logger.
func (l *Logger) SetTag(newTag string) {
	l.tag = newTag
}

// Printf works like fmt.Printf, but only prints when `DebugMode` is true.
func (l *Logger) Printf(format string, v ...interface{}) {
	if DebugMode {
		text := strings.TrimSpace(fmt.Sprintf(format, v...))
		fmt.Printf("%s %s\n", l.getPrefix(), text)
	}
}

// Errorf works like fmt.Errorf, but prints the error to the console if `DebugMode` is true.
func (l *Logger) Errorf(format string, v ...interface{}) error {
	err := fmt.Errorf(format, v...)
	if !DebugMode {
		return err
	}
	fmt.Printf("%s %v\n", l.getPrefix(), err)
	return err
}

// getPrefix renders the bracketed tag that starts every logged line.
func (l *Logger) getPrefix() string {
	return fmt.Sprintf("[%s]", l.tag)
}

6
vendor/github.com/hhrutter/lzw/.gitignore generated vendored Normal file
View File

@ -0,0 +1,6 @@
# Mac
**/.DS_Store
**/._.DS_Store
# VSCode
.vscode/*

27
vendor/github.com/hhrutter/lzw/LICENSE generated vendored Normal file
View File

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

37
vendor/github.com/hhrutter/lzw/README.md generated vendored Normal file
View File

@ -0,0 +1,37 @@
# Note
* This is a consolidated version of [compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw) that supports GIF, TIFF and PDF.
* Please refer to this [golang proposal](https://github.com/golang/go/issues/25409) for details.
* [github.com/hhrutter/tiff](https://github.com/hhrutter/tiff) uses this package to extend [x/image/tiff](https://github.com/golang/image/tree/master/tiff).
* [pdfcpu](https://github.com/pdfcpu/pdfcpu) uses this package for processing PDFs with embedded TIFF images.
## Background
* PDF's LZWDecode filter comes with the optional parameter `EarlyChange`.
* The type of this parameter is `int` and the defined values are 0 and 1.
* The default value is 1.
This parameter implies two variants of lzw. (See the [PDF spec](https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf)).
[compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw):
* the algorithm implied by EarlyChange value 1
* provides both Reader and Writer.
[x/image/tiff/lzw](https://github.com/golang/image/tree/master/tiff/lzw):
* the algorithm implied by EarlyChange value 0
* provides a Reader, lacks a Writer
In addition PDF expects a leading `clear_table` marker right at the beginning
which is not something [compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw) takes into account.
There are numerous PDF Writers out there and for arbitrary PDF files using the LZWDecode filter the following can be observed:
* Some PDF writers do not write the EOD (end of data) marker.
* Some PDF writers do not write the final bits after the EOD marker.
## Goal
An extended version of [compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw) with reliable support for GIF, TIFF and PDF.

3
vendor/github.com/hhrutter/lzw/go.mod generated vendored Normal file
View File

@ -0,0 +1,3 @@
module github.com/hhrutter/lzw
go 1.12

238
vendor/github.com/hhrutter/lzw/reader.go generated vendored Normal file
View File

@ -0,0 +1,238 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package lzw is an enhanced version of compress/lzw.
//
// It implements Adobe's PDF lzw compression as defined for the LZWDecode filter
// and is also compatible with the TIFF file format.
//
// See the golang proposal: https://github.com/golang/go/issues/25409.
//
// More information: https://github.com/pdfcpu/pdfcpu/tree/master/lzw
package lzw
import (
"bufio"
"errors"
"io"
)
const (
maxWidth = 12
decoderInvalidCode = 0xffff
flushBuffer = 1 << maxWidth
)
// decoder is the state from which the readXxx method converts a byte
// stream into a code stream.
type decoder struct {
r io.ByteReader
bits uint32
nBits uint
width uint
read func(*decoder) (uint16, error) // readMSB always for PDF and TIFF
litWidth uint // width in bits of literal codes
err error
// The first 1<<litWidth codes are literal codes.
// The next two codes mean clear and EOF.
// Other valid codes are in the range [lo, hi] where lo := clear + 2,
// with the upper bound incrementing on each code seen.
// overflow is the code at which hi overflows the code width. NOTE: TIFF's LZW is "off by one".
// last is the most recently seen code, or decoderInvalidCode.
//
// An invariant is that
// (hi < overflow) || (hi == overflow && last == decoderInvalidCode)
clear, eof, hi, overflow, last uint16
// Each code c in [lo, hi] expands to two or more bytes. For c != hi:
// suffix[c] is the last of these bytes.
// prefix[c] is the code for all but the last byte.
// This code can either be a literal code or another code in [lo, c).
// The c == hi case is a special case.
suffix [1 << maxWidth]uint8
prefix [1 << maxWidth]uint16
// output is the temporary output buffer.
// Literal codes are accumulated from the start of the buffer.
// Non-literal codes decode to a sequence of suffixes that are first
// written right-to-left from the end of the buffer before being copied
// to the start of the buffer.
// It is flushed when it contains >= 1<<maxWidth bytes,
// so that there is always room to decode an entire code.
output [2 * 1 << maxWidth]byte
o int // write index into output
toRead []byte // bytes to return from Read
// oneOff makes code length increases occur one code early.
oneOff bool
}
// readMSB returns the next code for "Most Significant Bits first" data.
func (d *decoder) readMSB() (uint16, error) {
	// Top up the 32-bit accumulator until it holds at least one full code.
	for d.nBits < d.width {
		x, err := d.r.ReadByte()
		if err != nil {
			return 0, err
		}
		// New bytes are packed just below the bits already buffered; the
		// accumulator's most significant bit is the next bit of the stream.
		d.bits |= uint32(x) << (24 - d.nBits)
		d.nBits += 8
	}
	// Extract the top d.width bits as the code, then drop them.
	code := uint16(d.bits >> (32 - d.width))
	d.bits <<= d.width
	d.nBits -= d.width
	return code, nil
}
// Read implements io.Reader, returning decompressed bytes.
// Buffered output is served first; the decoder only runs again once the
// buffer is drained, and a sticky error is reported only after all buffered
// bytes have been returned.
func (d *decoder) Read(b []byte) (int, error) {
	for {
		if len(d.toRead) > 0 {
			n := copy(b, d.toRead)
			d.toRead = d.toRead[n:]
			return n, nil
		}
		if d.err != nil {
			return 0, d.err
		}
		d.decode()
	}
}
// handleOverflow widens the code size when the next code would no longer fit
// in d.width bits. With oneOff set (PDF EarlyChange=1 / TIFF behavior), the
// width switch happens one code early.
func (d *decoder) handleOverflow() {
	ui := d.hi
	if d.oneOff {
		ui++
	}
	if ui >= d.overflow {
		if d.width == maxWidth {
			// Already at the 12-bit maximum: codes cannot widen further.
			d.last = decoderInvalidCode
			// Undo the d.hi++ a few lines above, so that (1) we maintain
			// the invariant that d.hi <= d.overflow, and (2) d.hi does not
			// eventually overflow a uint16.
			if !d.oneOff {
				d.hi--
			}
		} else {
			d.width++
			d.overflow <<= 1
		}
	}
}
// decode decompresses bytes from r and leaves them in d.toRead.
// read specifies how to decode bytes into codes.
// litWidth is the width in bits of literal codes.
func (d *decoder) decode() {
	i := 0
	// Loop over the code stream, converting codes into decompressed bytes.
loop:
	for {
		code, err := d.read(d)
		i++
		if err != nil {
			// Some PDF Writers write an EOD some don't.
			// Don't insist on EOD marker.
			// Don't return an unexpected EOF error.
			d.err = err
			break
		}
		switch {
		case code < d.clear:
			// We have a literal code.
			d.output[d.o] = uint8(code)
			d.o++
			if d.last != decoderInvalidCode {
				// Save what the hi code expands to.
				d.suffix[d.hi] = uint8(code)
				d.prefix[d.hi] = d.last
			}
		case code == d.clear:
			// Clear code: reset the dictionary to its initial state.
			d.width = 1 + d.litWidth
			d.hi = d.eof
			d.overflow = 1 << d.width
			d.last = decoderInvalidCode
			continue
		case code == d.eof:
			d.err = io.EOF
			break loop
		case code <= d.hi:
			c, i := code, len(d.output)-1
			if code == d.hi && d.last != decoderInvalidCode {
				// code == hi is a special case which expands to the last expansion
				// followed by the head of the last expansion. To find the head, we walk
				// the prefix chain until we find a literal code.
				c = d.last
				for c >= d.clear {
					c = d.prefix[c]
				}
				d.output[i] = uint8(c)
				i--
				c = d.last
			}
			// Copy the suffix chain into output and then write that to w.
			for c >= d.clear {
				d.output[i] = d.suffix[c]
				i--
				c = d.prefix[c]
			}
			d.output[i] = uint8(c)
			d.o += copy(d.output[d.o:], d.output[i:])
			if d.last != decoderInvalidCode {
				// Save what the hi code expands to.
				d.suffix[d.hi] = uint8(c)
				d.prefix[d.hi] = d.last
			}
		default:
			d.err = errors.New("lzw: invalid code")
			break loop
		}
		d.last, d.hi = code, d.hi+1
		d.handleOverflow()
		if d.o >= flushBuffer {
			// Enough output accumulated; hand it to Read.
			break
		}
	}
	// Flush pending output.
	d.toRead = d.output[:d.o]
	d.o = 0
}
// errClosed is the sticky error installed by Close on readers and writers.
var errClosed = errors.New("lzw: reader/writer is closed")

// Close marks the decoder closed. It never fails; subsequent Reads return
// errClosed.
func (d *decoder) Close() error {
	d.err = errClosed // in case any Reads come along
	return nil
}
// NewReader creates a new io.ReadCloser.
// Reads from the returned io.ReadCloser read and decompress data from r.
// If r does not also implement io.ByteReader,
// the decompressor may read more data than necessary from r.
// It is the caller's responsibility to call Close on the ReadCloser when
// finished reading.
// oneOff makes code length increases occur one code early. It should be true
// for LZWDecode filters with earlyChange=1 which is also the default.
func NewReader(r io.Reader, oneOff bool) io.ReadCloser {
	br, ok := r.(io.ByteReader)
	if !ok {
		br = bufio.NewReader(r)
	}
	// Literal codes are always 8 bits wide here (PDF/TIFF usage).
	lw := uint(8)
	clear := uint16(1) << lw
	width := 1 + lw
	return &decoder{
		r:        br,
		read:     (*decoder).readMSB,
		litWidth: lw,
		width:    width,
		clear:    clear,
		eof:      clear + 1, // EOD code immediately follows the clear code
		hi:       clear + 1,
		overflow: uint16(1) << width,
		last:     decoderInvalidCode,
		oneOff:   oneOff,
	}
}

283
vendor/github.com/hhrutter/lzw/writer.go generated vendored Normal file
View File

@ -0,0 +1,283 @@
// Derived from compress/lzw in order to implement
// Adobe's PDF lzw compression as defined for the LZWDecode filter.
// See https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf
// and https://github.com/golang/go/issues/25409.
//
// It is also compatible with the TIFF file format.
//
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lzw
import (
"bufio"
"errors"
"io"
)
// A writer is a buffered, flushable writer.
type writer interface {
io.ByteWriter
Flush() error
}
// An errWriteCloser is an io.WriteCloser that always returns a given error.
type errWriteCloser struct {
	err error
}

// Write always fails with the stored error.
func (e *errWriteCloser) Write([]byte) (int, error) {
	return 0, e.err
}

// Close reports the stored error.
func (e *errWriteCloser) Close() error {
	return e.err
}
const (
// A code is a 12 bit value, stored as a uint32 when encoding to avoid
// type conversions when shifting bits.
maxCode = 1<<12 - 1
invalidCode = 1<<32 - 1
// There are 1<<12 possible codes, which is an upper bound on the number of
// valid hash table entries at any given point in time. tableSize is 4x that.
tableSize = 4 * 1 << 12
tableMask = tableSize - 1
// A hash table entry is a uint32. Zero is an invalid entry since the
// lower 12 bits of a valid entry must be a non-literal code.
invalidEntry = 0
)
// encoder is LZW compressor.
type encoder struct {
// w is the writer that compressed bytes are written to.
w writer
// write, bits, nBits and width are the state for
// converting a code stream into a byte stream.
write func(*encoder, uint32) error
bits uint32
nBits uint
width uint
// litWidth is the width in bits of literal codes.
litWidth uint
// hi is the code implied by the next code emission.
// overflow is the code at which hi overflows the code width.
hi, overflow uint32
// savedCode is the accumulated code at the end of the most recent Write
// call. It is equal to invalidCode if there was no such call.
savedCode uint32
// err is the first error encountered during writing. Closing the encoder
// will make any future Write calls return errClosed
err error
// table is the hash table from 20-bit keys to 12-bit values. Each table
// entry contains key<<12|val and collisions resolve by linear probing.
// The keys consist of a 12-bit code prefix and an 8-bit byte suffix.
// The values are a 12-bit code.
table [tableSize]uint32
// oneOff makes code length increases occur one code early.
oneOff bool
}
// writeLSB writes the code c for "Least Significant Bits first" data.
// NOTE(review): NewWriter in this file always installs writeMSB; this
// variant appears kept for parity with compress/lzw — confirm before removal.
func (e *encoder) writeLSB(c uint32) error {
	// Pack the code above any bits still pending in the accumulator.
	e.bits |= c << e.nBits
	e.nBits += e.width
	// Emit full bytes from the bottom of the accumulator.
	for e.nBits >= 8 {
		if err := e.w.WriteByte(uint8(e.bits)); err != nil {
			return err
		}
		e.bits >>= 8
		e.nBits -= 8
	}
	return nil
}
// writeMSB writes the code c for "Most Significant Bits first" data.
// This is the variant installed by NewWriter for PDF/TIFF streams.
func (e *encoder) writeMSB(c uint32) error {
	// Pack the code just below any bits still pending in the accumulator.
	e.bits |= c << (32 - e.width - e.nBits)
	e.nBits += e.width
	// Emit full bytes from the top of the accumulator.
	for e.nBits >= 8 {
		if err := e.w.WriteByte(uint8(e.bits >> 24)); err != nil {
			return err
		}
		e.bits <<= 8
		e.nBits -= 8
	}
	return nil
}
// errOutOfCodes is an internal error that means that the encoder has run out
// of unused codes and a clear code needs to be sent next.
var errOutOfCodes = errors.New("lzw: out of codes")

// incHi increments e.hi and checks for both overflow and running out of
// unused codes. In the latter case, incHi sends a clear code, resets the
// encoder state and returns errOutOfCodes.
func (e *encoder) incHi() error {
	e.hi++
	// The PDF spec defines for the LZWDecode filter a parameter "EarlyChange".
	// This parameter drives the variation of lzw compression to be used.
	// The standard compress/lzw does not know about oneOff.
	ui := e.hi
	if e.oneOff {
		// Widen (and clear) one code early.
		ui++
	}
	if ui == e.overflow {
		e.width++
		e.overflow <<= 1
	}
	if ui == maxCode {
		// The 12-bit code space is exhausted: emit a clear code and reset
		// the dictionary, code width, and hash table.
		clear := uint32(1) << e.litWidth
		if err := e.write(e, clear); err != nil {
			return err
		}
		e.width = e.litWidth + 1
		e.hi = clear + 1
		e.overflow = clear << 1
		for i := range e.table {
			e.table[i] = invalidEntry
		}
		return errOutOfCodes
	}
	return nil
}
// Write writes a compressed representation of p to e's underlying writer.
// On success n == len(p); compressed bits may remain buffered in the
// accumulator until Close.
func (e *encoder) Write(p []byte) (n int, err error) {
	if e.err != nil {
		return 0, e.err
	}
	if len(p) == 0 {
		return 0, nil
	}
	// With litWidth fixed at 8 (see NewWriter) maxLit is 0xff and this scan
	// is skipped; kept for parity with compress/lzw.
	if maxLit := uint8(1<<e.litWidth - 1); maxLit != 0xff {
		for _, x := range p {
			if x > maxLit {
				e.err = errors.New("lzw: input byte too large for the litWidth")
				return 0, e.err
			}
		}
	}
	n = len(p)
	code := e.savedCode
	if code == invalidCode {
		// The first code sent is always a literal code.
		code, p = uint32(p[0]), p[1:]
	}
loop:
	for _, x := range p {
		literal := uint32(x)
		key := code<<8 | literal
		// If there is a hash table hit for this key then we continue the loop
		// and do not emit a code yet.
		hash := (key>>12 ^ key) & tableMask
		for h, t := hash, e.table[hash]; t != invalidEntry; {
			if key == t>>12 {
				code = t & maxCode
				continue loop
			}
			h = (h + 1) & tableMask
			t = e.table[h]
		}
		// Otherwise, write the current code, and literal becomes the start of
		// the next emitted code.
		if e.err = e.write(e, code); e.err != nil {
			return 0, e.err
		}
		code = literal
		// Increment e.hi, the next implied code. If we run out of codes, reset
		// the encoder state (including clearing the hash table) and continue.
		if err1 := e.incHi(); err1 != nil {
			if err1 == errOutOfCodes {
				continue
			}
			e.err = err1
			return 0, e.err
		}
		// Otherwise, insert key -> e.hi into the map that e.table represents.
		for {
			if e.table[hash] == invalidEntry {
				e.table[hash] = (key << 12) | e.hi
				break
			}
			hash = (hash + 1) & tableMask
		}
	}
	// Hold back the last code; it may still be extended by the next Write.
	e.savedCode = code
	return n, nil
}
// Close closes the encoder, flushing any pending output. It does not close or
// flush e's underlying writer.
// Closing twice is harmless: the second call returns nil.
func (e *encoder) Close() error {
	if e.err != nil {
		if e.err == errClosed {
			return nil
		}
		return e.err
	}
	// Make any future calls to Write return errClosed.
	e.err = errClosed
	// Write the savedCode if valid.
	if e.savedCode != invalidCode {
		if err := e.write(e, e.savedCode); err != nil {
			return err
		}
		if err := e.incHi(); err != nil && err != errOutOfCodes {
			return err
		}
	}
	// Write the eof code.
	eof := uint32(1)<<e.litWidth + 1
	if err := e.write(e, eof); err != nil {
		return err
	}
	// Write the final bits (MSB-aligned: the remaining bits sit in the
	// accumulator's top byte).
	if e.nBits > 0 {
		e.bits >>= 24
		if err := e.w.WriteByte(uint8(e.bits)); err != nil {
			return err
		}
	}
	return e.w.Flush()
}
// NewWriter creates a new io.WriteCloser.
// Writes to the returned io.WriteCloser are compressed and written to w.
// It is the caller's responsibility to call Close on the WriteCloser when
// finished writing.
// oneOff makes code length increases occur one code early. It should be true
// for LZWDecode filters with earlyChange=1 which is also the default.
func NewWriter(w io.Writer, oneOff bool) io.WriteCloser {
	bw, ok := w.(writer)
	if !ok {
		bw = bufio.NewWriter(w)
	}
	// Literal codes are always 8 bits wide here (PDF/TIFF usage).
	lw := uint(8)
	e := encoder{
		w:         bw,
		write:     (*encoder).writeMSB,
		litWidth:  lw,
		width:     1 + lw,
		hi:        1<<lw + 1,
		overflow:  1 << (lw + 1),
		savedCode: invalidCode,
		oneOff:    oneOff,
	}
	// Write initial clear_table.
	// The standard compress/lzw does not do this.
	// NOTE(review): the write error is discarded here; presumably a failing
	// underlying writer keeps failing on later writes — confirm.
	clear := uint32(1) << e.litWidth
	e.write(&e, clear)
	return &e
}

6
vendor/github.com/hhrutter/tiff/.gitignore generated vendored Normal file
View File

@ -0,0 +1,6 @@
# Mac
**/.DS_Store
**/._.DS_Store
# VSCode
.vscode/*

27
vendor/github.com/hhrutter/tiff/LICENSE generated vendored Normal file
View File

@ -0,0 +1,27 @@
Copyright (c) 2009 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

23
vendor/github.com/hhrutter/tiff/README.md generated vendored Normal file
View File

@ -0,0 +1,23 @@
# Note
This package is an improved version of [x/image/tiff](https://github.com/golang/image/tree/master/tiff) featuring:
* Read support for CCITT Group3/4 compressed images using [x/image/ccitt](https://github.com/golang/image/tree/master/ccitt)
* Read/write support for LZW compressed images using [github.com/hhrutter/lzw](https://github.com/hhrutter/lzw)
* Read/write support for the CMYK color model.
## Background
Working on [pdfcpu](https://github.com/pdfcpu/pdfcpu) (a PDF processor) created a need for processing TIFF files and LZW compression in more detail than the standard library supports.
1) CCITT compression for monochrome images was the first need. This is being addressed as part of ongoing work on [x/image/ccitt](https://github.com/golang/image/tree/master/ccitt).
2) As stated in this [golang proposal](https://github.com/golang/go/issues/25409) Go LZW implementations are spread out over the standard library at [compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw) and [x/image/tiff/lzw](https://github.com/golang/image/tree/master/tiff/lzw). As of Go 1.12 [compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw) works reliably for GIF only. This is also the reason the TIFF package at [x/image/tiff](https://github.com/golang/image/tree/master/tiff) provides its own lzw implementation for compression. With PDF there is a third variant of lzw needed for reading/writing lzw compressed PDF object streams and processing embedded TIFF images.
[github.com/hhrutter/lzw](https://github.com/hhrutter/lzw) fills this gap. It is an extended version of [compress/lzw](https://github.com/golang/go/tree/master/src/compress/lzw) supporting GIF, PDF and TIFF.
3) The PDF specification defines a CMYK color space. This is currently not supported at [x/image/tiff](https://github.com/golang/image/tree/master/tiff).
## Goal
An improved version of [x/image/tiff](https://github.com/golang/image/tree/master/tiff) with full read/write support for CCITT, LZW compression and the CMYK color model.

69
vendor/github.com/hhrutter/tiff/buffer.go generated vendored Normal file
View File

@ -0,0 +1,69 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package tiff
import "io"
// buffer buffers an io.Reader to satisfy io.ReaderAt.
type buffer struct {
r io.Reader
buf []byte
}
// fill reads data from b.r until the buffer contains at least end bytes.
func (b *buffer) fill(end int) error {
m := len(b.buf)
if end > m {
if end > cap(b.buf) {
newcap := 1024
for newcap < end {
newcap *= 2
}
newbuf := make([]byte, end, newcap)
copy(newbuf, b.buf)
b.buf = newbuf
} else {
b.buf = b.buf[:end]
}
if n, err := io.ReadFull(b.r, b.buf[m:end]); err != nil {
end = m + n
b.buf = b.buf[:end]
return err
}
}
return nil
}
func (b *buffer) ReadAt(p []byte, off int64) (int, error) {
o := int(off)
end := o + len(p)
if int64(end) != off+int64(len(p)) {
return 0, io.ErrUnexpectedEOF
}
err := b.fill(end)
return copy(p, b.buf[o:end]), err
}
// Slice returns a slice of the underlying buffer. The slice contains
// n bytes starting at offset off.
func (b *buffer) Slice(off, n int) ([]byte, error) {
end := off + n
if err := b.fill(end); err != nil {
return nil, err
}
return b.buf[off:end], nil
}
// newReaderAt converts an io.Reader into an io.ReaderAt.
func newReaderAt(r io.Reader) io.ReaderAt {
if ra, ok := r.(io.ReaderAt); ok {
return ra
}
return &buffer{
r: r,
buf: make([]byte, 0, 1024),
}
}

58
vendor/github.com/hhrutter/tiff/compress.go generated vendored Normal file
View File

@ -0,0 +1,58 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package tiff
import (
"bufio"
"io"
)
type byteReader interface {
io.Reader
io.ByteReader
}
// unpackBits decodes the PackBits-compressed data in src and returns the
// uncompressed data.
//
// The PackBits compression format is described in section 9 (p. 42)
// of the TIFF spec.
func unpackBits(r io.Reader) ([]byte, error) {
buf := make([]byte, 128)
dst := make([]byte, 0, 1024)
br, ok := r.(byteReader)
if !ok {
br = bufio.NewReader(r)
}
for {
b, err := br.ReadByte()
if err != nil {
if err == io.EOF {
return dst, nil
}
return nil, err
}
code := int(int8(b))
switch {
case code >= 0:
n, err := io.ReadFull(br, buf[:code+1])
if err != nil {
return nil, err
}
dst = append(dst, buf[:n]...)
case code == -128:
// No-op.
default:
if b, err = br.ReadByte(); err != nil {
return nil, err
}
for j := 0; j < 1-code; j++ {
buf[j] = b
}
dst = append(dst, buf[:1-code]...)
}
}
}

149
vendor/github.com/hhrutter/tiff/consts.go generated vendored Normal file
View File

@ -0,0 +1,149 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package tiff
// A tiff image file contains one or more images. The metadata
// of each image is contained in an Image File Directory (IFD),
// which contains entries of 12 bytes each and is described
// on page 14-16 of the specification. An IFD entry consists of
//
// - a tag, which describes the signification of the entry,
// - the data type and length of the entry,
// - the data itself or a pointer to it if it is more than 4 bytes.
//
// The presence of a length means that each IFD is effectively an array.
const (
leHeader = "II\x2A\x00" // Header for little-endian files.
beHeader = "MM\x00\x2A" // Header for big-endian files.
ifdLen = 12 // Length of an IFD entry in bytes.
)
// Data types (p. 14-16 of the spec).
const (
dtByte = 1
dtASCII = 2
dtShort = 3
dtLong = 4
dtRational = 5
)
// The length of one instance of each data type in bytes.
var lengths = [...]uint32{0, 1, 1, 2, 4, 8}
// Tags (see p. 28-41 of the spec).
const (
tImageWidth = 256
tImageLength = 257
tBitsPerSample = 258
tCompression = 259
tPhotometricInterpretation = 262
tFillOrder = 266
tStripOffsets = 273
tSamplesPerPixel = 277
tRowsPerStrip = 278
tStripByteCounts = 279
tT4Options = 292 // CCITT Group 3 options, a set of 32 flag bits.
tT6Options = 293 // CCITT Group 4 options, a set of 32 flag bits.
tTileWidth = 322
tTileLength = 323
tTileOffsets = 324
tTileByteCounts = 325
tXResolution = 282
tYResolution = 283
tResolutionUnit = 296
tPredictor = 317
tColorMap = 320
tExtraSamples = 338
tSampleFormat = 339
)
// Compression types (defined in various places in the spec and supplements).
const (
cNone = 1
cCCITT = 2
cG3 = 3 // Group 3 Fax.
cG4 = 4 // Group 4 Fax.
cLZW = 5
cJPEGOld = 6 // Superseded by cJPEG.
cJPEG = 7
cDeflate = 8 // zlib compression.
cPackBits = 32773
cDeflateOld = 32946 // Superseded by cDeflate.
)
// Photometric interpretation values (see p. 37 of the spec).
const (
pWhiteIsZero = 0
pBlackIsZero = 1
pRGB = 2
pPaletted = 3
pTransMask = 4 // transparency mask
pCMYK = 5
pYCbCr = 6
pCIELab = 8
)
// Values for the tPredictor tag (page 64-65 of the spec).
const (
prNone = 1
prHorizontal = 2
)
// Values for the tResolutionUnit tag (page 18).
const (
resNone = 1
resPerInch = 2 // Dots per inch.
resPerCM = 3 // Dots per centimeter.
)
// imageMode represents the mode of the image.
type imageMode int
const (
mBilevel imageMode = iota
mPaletted
mGray
mGrayInvert
mRGB
mRGBA
mNRGBA
mCMYK
)
// CompressionType describes the type of compression used in Options.
type CompressionType int
// Constants for supported compression types.
const (
Uncompressed CompressionType = iota
Deflate
LZW
CCITTGroup3
CCITTGroup4
)
// specValue returns the compression type constant from the TIFF spec that
// is equivalent to c. Any unrecognized value maps to cNone.
func (c CompressionType) specValue() uint32 {
	switch c {
	case Deflate:
		return cDeflate
	case LZW:
		return cLZW
	case CCITTGroup3:
		return cG3
	case CCITTGroup4:
		return cG4
	default:
		return cNone
	}
}

8
vendor/github.com/hhrutter/tiff/go.mod generated vendored Normal file
View File

@ -0,0 +1,8 @@
module github.com/hhrutter/tiff
go 1.12
require (
github.com/hhrutter/lzw v0.0.0-20190827003112-58b82c5a41cc
golang.org/x/image v0.0.0-20190823064033-3a9bac650e44
)

6
vendor/github.com/hhrutter/tiff/go.sum generated vendored Normal file
View File

@ -0,0 +1,6 @@
github.com/hhrutter/lzw v0.0.0-20190826233241-e4e67a6cc9b8 h1:U1DNFAgO5OSS70hFTvB7PN/Ex0mhqC7cZZ4FUaNJ8F0=
github.com/hhrutter/lzw v0.0.0-20190827003112-58b82c5a41cc h1:crd+cScoxEqSOqClzjkNMNQNdMCF3SGXhPdDWBQfNZE=
github.com/hhrutter/lzw v0.0.0-20190827003112-58b82c5a41cc/go.mod h1:yJBvOcu1wLQ9q9XZmfiPfur+3dQJuIhYQsMGLYcItZk=
golang.org/x/image v0.0.0-20190823064033-3a9bac650e44 h1:1/e6LjNi7iqpDTz8tCLSKoR5dqrX4C3ub4H31JJZM4U=
golang.org/x/image v0.0.0-20190823064033-3a9bac650e44/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=

735
vendor/github.com/hhrutter/tiff/reader.go generated vendored Normal file
View File

@ -0,0 +1,735 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package tiff is an enhanced version of x/image/tiff.
//
// It uses a consolidated version of compress/lzw (https://github.com/hhrutter/lzw) for compression and also adds support for CMYK.
//
// More information: https://github.com/hhrutter/tiff
package tiff
import (
"compress/zlib"
"encoding/binary"
"fmt"
"image"
"image/color"
"io"
"io/ioutil"
"math"
"github.com/hhrutter/lzw"
"golang.org/x/image/ccitt"
)
// A FormatError reports that the input is not a valid TIFF image.
type FormatError string

// Error implements the error interface.
func (e FormatError) Error() string {
	return "tiff: invalid format: " + string(e)
}

// An UnsupportedError reports that the input uses a valid but
// unimplemented feature.
type UnsupportedError string

// Error implements the error interface.
func (e UnsupportedError) Error() string {
	return "tiff: unsupported feature: " + string(e)
}

// errNoPixels is returned when a strip or tile holds fewer bytes than the
// pixels it must cover.
var errNoPixels = FormatError("not enough pixel data")
// decoder holds the state for decoding one TIFF image.
type decoder struct {
	r         io.ReaderAt
	byteOrder binary.ByteOrder // from the "II" (little) or "MM" (big) header
	config    image.Config
	mode      imageMode
	bpp       uint           // bits per sample
	features  map[int][]uint // raw IFD entry values, keyed by tag number
	palette   []color.Color  // color map for paletted images

	buf   []byte
	off   int    // Current offset in buf.
	v     uint32 // Buffer value for reading with arbitrary bit depths.
	nbits uint   // Remaining number of bits in v.
}

// firstVal returns the first uint of the features entry with the given tag,
// or 0 if the tag does not exist.
func (d *decoder) firstVal(tag int) uint {
	f := d.features[tag]
	if len(f) == 0 {
		return 0
	}
	return f[0]
}
// ifdUint decodes the IFD entry in p, which must be of the Byte, Short
// or Long type, and returns the decoded uint values.
func (d *decoder) ifdUint(p []byte) (u []uint, err error) {
	var raw []byte
	if len(p) < ifdLen {
		return nil, FormatError("bad IFD entry")
	}
	datatype := d.byteOrder.Uint16(p[2:4])
	if dt := int(datatype); dt <= 0 || dt >= len(lengths) {
		return nil, UnsupportedError("IFD entry datatype")
	}
	count := d.byteOrder.Uint32(p[4:8])
	// Reject counts whose total byte length would overflow an int32.
	if count > math.MaxInt32/lengths[datatype] {
		return nil, FormatError("IFD data too large")
	}
	if datalen := lengths[datatype] * count; datalen > 4 {
		// The IFD contains a pointer to the real value.
		raw = make([]byte, datalen)
		_, err = d.r.ReadAt(raw, int64(d.byteOrder.Uint32(p[8:12])))
	} else {
		// Values of at most 4 bytes are stored inline in the entry itself.
		raw = p[8 : 8+datalen]
	}
	if err != nil {
		return nil, err
	}
	u = make([]uint, count)
	switch datatype {
	case dtByte:
		for i := uint32(0); i < count; i++ {
			u[i] = uint(raw[i])
		}
	case dtShort:
		for i := uint32(0); i < count; i++ {
			u[i] = uint(d.byteOrder.Uint16(raw[2*i : 2*(i+1)]))
		}
	case dtLong:
		for i := uint32(0); i < count; i++ {
			u[i] = uint(d.byteOrder.Uint32(raw[4*i : 4*(i+1)]))
		}
	default:
		return nil, UnsupportedError("data type")
	}
	return u, nil
}
// parseIFD decides whether the IFD entry in p is "interesting" and
// stows away the data in the decoder. It returns the tag number of the
// entry and an error, if any.
func (d *decoder) parseIFD(p []byte) (int, error) {
	tag := d.byteOrder.Uint16(p[0:2])
	switch tag {
	case tBitsPerSample,
		tExtraSamples,
		tPhotometricInterpretation,
		tCompression,
		tPredictor,
		tStripOffsets,
		tStripByteCounts,
		tRowsPerStrip,
		tTileWidth,
		tTileLength,
		tTileOffsets,
		tTileByteCounts,
		tImageLength,
		tImageWidth,
		tFillOrder,
		tT4Options,
		tT6Options:
		val, err := d.ifdUint(p)
		if err != nil {
			return 0, err
		}
		d.features[int(tag)] = val
	case tColorMap:
		val, err := d.ifdUint(p)
		if err != nil {
			return 0, err
		}
		numcolors := len(val) / 3
		if len(val)%3 != 0 || numcolors <= 0 || numcolors > 256 {
			return 0, FormatError("bad ColorMap length")
		}
		// The color map stores all reds, then all greens, then all blues.
		d.palette = make([]color.Color, numcolors)
		for i := 0; i < numcolors; i++ {
			d.palette[i] = color.RGBA64{
				uint16(val[i]),
				uint16(val[i+numcolors]),
				uint16(val[i+2*numcolors]),
				0xffff,
			}
		}
	case tSampleFormat:
		// Page 27 of the spec: If the SampleFormat is present and
		// the value is not 1 [= unsigned integer data], a Baseline
		// TIFF reader that cannot handle the SampleFormat value
		// must terminate the import process gracefully.
		val, err := d.ifdUint(p)
		if err != nil {
			return 0, err
		}
		for _, v := range val {
			if v != 1 {
				return 0, UnsupportedError("sample format")
			}
		}
	}
	return int(tag), nil
}
// readBits reads n bits from the internal buffer starting at the current offset.
// It reports ok == false once the buffer is exhausted.
func (d *decoder) readBits(n uint) (v uint32, ok bool) {
	// Pull whole bytes into v until at least n bits are buffered.
	for d.nbits < n {
		d.v <<= 8
		if d.off >= len(d.buf) {
			return 0, false
		}
		d.v |= uint32(d.buf[d.off])
		d.off++
		d.nbits += 8
	}
	d.nbits -= n
	rv := d.v >> d.nbits
	// Clear the bits just returned, keeping only the unread remainder.
	d.v &^= rv << d.nbits
	return rv, true
}
// flushBits drops any bits still buffered by readBits. It is called at
// the end of each scan line, since rows are byte-aligned.
func (d *decoder) flushBits() {
	d.v, d.nbits = 0, 0
}
// minInt returns the smaller of x or y.
func minInt(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// decode decodes the raw data of an image.
// It reads from d.buf and writes the strip or tile into dst.
//
// (xmin, ymin)-(xmax, ymax) is the block's position in image coordinates;
// the block may extend beyond dst's bounds, in which case the excess
// source data is skipped or clamped.
func (d *decoder) decode(dst image.Image, xmin, ymin, xmax, ymax int) error {
	d.off = 0
	// Apply horizontal predictor if necessary.
	// In this case, p contains the color difference to the preceding pixel.
	// See page 64-65 of the spec.
	if d.firstVal(tPredictor) == prHorizontal {
		switch d.bpp {
		case 16:
			var off int
			n := 2 * len(d.features[tBitsPerSample]) // bytes per sample times samples per pixel
			for y := ymin; y < ymax; y++ {
				off += n // the row's first pixel is stored verbatim
				for x := 0; x < (xmax-xmin-1)*n; x += 2 {
					if off+2 > len(d.buf) {
						return errNoPixels
					}
					// Undo the differencing in place: add the previous
					// pixel's sample to the stored difference.
					v0 := d.byteOrder.Uint16(d.buf[off-n : off-n+2])
					v1 := d.byteOrder.Uint16(d.buf[off : off+2])
					d.byteOrder.PutUint16(d.buf[off:off+2], v1+v0)
					off += 2
				}
			}
		case 8:
			var off int
			n := 1 * len(d.features[tBitsPerSample]) // bytes per sample times samples per pixel
			for y := ymin; y < ymax; y++ {
				off += n // the row's first pixel is stored verbatim
				for x := 0; x < (xmax-xmin-1)*n; x++ {
					if off >= len(d.buf) {
						return errNoPixels
					}
					d.buf[off] += d.buf[off-n]
					off++
				}
			}
		case 1:
			return UnsupportedError("horizontal predictor with 1 BitsPerSample")
		}
	}
	// Clamp the block to dst's bounds so a partial edge block does not
	// write outside the image.
	rMaxX := minInt(xmax, dst.Bounds().Max.X)
	rMaxY := minInt(ymax, dst.Bounds().Max.Y)
	switch d.mode {
	case mGray, mGrayInvert:
		if d.bpp == 16 {
			img := dst.(*image.Gray16)
			for y := ymin; y < rMaxY; y++ {
				for x := xmin; x < rMaxX; x++ {
					if d.off+2 > len(d.buf) {
						return errNoPixels
					}
					v := d.byteOrder.Uint16(d.buf[d.off : d.off+2])
					d.off += 2
					if d.mode == mGrayInvert {
						v = 0xffff - v
					}
					img.SetGray16(x, y, color.Gray16{v})
				}
				if rMaxX == img.Bounds().Max.X {
					// Skip the block's padding columns that lie beyond
					// the image width.
					d.off += 2 * (xmax - img.Bounds().Max.X)
				}
			}
		} else {
			img := dst.(*image.Gray)
			max := uint32((1 << d.bpp) - 1)
			for y := ymin; y < rMaxY; y++ {
				for x := xmin; x < rMaxX; x++ {
					v, ok := d.readBits(d.bpp)
					if !ok {
						return errNoPixels
					}
					// Scale the sample from [0, max] to [0, 255].
					v = v * 0xff / max
					if d.mode == mGrayInvert {
						v = 0xff - v
					}
					img.SetGray(x, y, color.Gray{uint8(v)})
				}
				d.flushBits() // rows are byte-aligned
			}
		}
	case mPaletted:
		img := dst.(*image.Paletted)
		for y := ymin; y < rMaxY; y++ {
			for x := xmin; x < rMaxX; x++ {
				v, ok := d.readBits(d.bpp)
				if !ok {
					return errNoPixels
				}
				img.SetColorIndex(x, y, uint8(v))
			}
			d.flushBits() // rows are byte-aligned
		}
	case mRGB:
		if d.bpp == 16 {
			img := dst.(*image.RGBA64)
			for y := ymin; y < rMaxY; y++ {
				for x := xmin; x < rMaxX; x++ {
					if d.off+6 > len(d.buf) {
						return errNoPixels
					}
					r := d.byteOrder.Uint16(d.buf[d.off+0 : d.off+2])
					g := d.byteOrder.Uint16(d.buf[d.off+2 : d.off+4])
					b := d.byteOrder.Uint16(d.buf[d.off+4 : d.off+6])
					d.off += 6
					img.SetRGBA64(x, y, color.RGBA64{r, g, b, 0xffff})
				}
			}
		} else {
			img := dst.(*image.RGBA)
			for y := ymin; y < rMaxY; y++ {
				min := img.PixOffset(xmin, y)
				max := img.PixOffset(rMaxX, y)
				off := (y - ymin) * (xmax - xmin) * 3
				for i := min; i < max; i += 4 {
					if off+3 > len(d.buf) {
						return errNoPixels
					}
					// Expand 3-byte RGB source pixels to 4-byte RGBA
					// with full alpha.
					img.Pix[i+0] = d.buf[off+0]
					img.Pix[i+1] = d.buf[off+1]
					img.Pix[i+2] = d.buf[off+2]
					img.Pix[i+3] = 0xff
					off += 3
				}
			}
		}
	case mNRGBA:
		if d.bpp == 16 {
			img := dst.(*image.NRGBA64)
			for y := ymin; y < rMaxY; y++ {
				for x := xmin; x < rMaxX; x++ {
					if d.off+8 > len(d.buf) {
						return errNoPixels
					}
					r := d.byteOrder.Uint16(d.buf[d.off+0 : d.off+2])
					g := d.byteOrder.Uint16(d.buf[d.off+2 : d.off+4])
					b := d.byteOrder.Uint16(d.buf[d.off+4 : d.off+6])
					a := d.byteOrder.Uint16(d.buf[d.off+6 : d.off+8])
					d.off += 8
					img.SetNRGBA64(x, y, color.NRGBA64{r, g, b, a})
				}
			}
		} else {
			img := dst.(*image.NRGBA)
			for y := ymin; y < rMaxY; y++ {
				min := img.PixOffset(xmin, y)
				max := img.PixOffset(rMaxX, y)
				// Source rows are (xmax-xmin)*4 bytes wide; copy one
				// whole row at a time.
				i0, i1 := (y-ymin)*(xmax-xmin)*4, (y-ymin+1)*(xmax-xmin)*4
				if i1 > len(d.buf) {
					return errNoPixels
				}
				copy(img.Pix[min:max], d.buf[i0:i1])
			}
		}
	case mRGBA:
		if d.bpp == 16 {
			img := dst.(*image.RGBA64)
			for y := ymin; y < rMaxY; y++ {
				for x := xmin; x < rMaxX; x++ {
					if d.off+8 > len(d.buf) {
						return errNoPixels
					}
					r := d.byteOrder.Uint16(d.buf[d.off+0 : d.off+2])
					g := d.byteOrder.Uint16(d.buf[d.off+2 : d.off+4])
					b := d.byteOrder.Uint16(d.buf[d.off+4 : d.off+6])
					a := d.byteOrder.Uint16(d.buf[d.off+6 : d.off+8])
					d.off += 8
					img.SetRGBA64(x, y, color.RGBA64{r, g, b, a})
				}
			}
		} else {
			img := dst.(*image.RGBA)
			for y := ymin; y < rMaxY; y++ {
				min := img.PixOffset(xmin, y)
				max := img.PixOffset(rMaxX, y)
				i0, i1 := (y-ymin)*(xmax-xmin)*4, (y-ymin+1)*(xmax-xmin)*4
				if i1 > len(d.buf) {
					return errNoPixels
				}
				copy(img.Pix[min:max], d.buf[i0:i1])
			}
		}
	case mCMYK:
		// d.bpp must be 8 (newDecoder rejects 16-bit CMYK).
		img := dst.(*image.CMYK)
		for y := ymin; y < rMaxY; y++ {
			min := img.PixOffset(xmin, y)
			max := img.PixOffset(rMaxX, y)
			i0, i1 := (y-ymin)*(xmax-xmin)*4, (y-ymin+1)*(xmax-xmin)*4
			if i1 > len(d.buf) {
				return errNoPixels
			}
			copy(img.Pix[min:max], d.buf[i0:i1])
		}
	}
	return nil
}
// newDecoder reads the TIFF header and all IFD entries from r and returns
// a decoder primed with the image's configuration (dimensions, color
// model, bits per sample and image mode). It does not read pixel data.
func newDecoder(r io.Reader) (*decoder, error) {
	d := &decoder{
		r:        newReaderAt(r),
		features: make(map[int][]uint),
	}
	p := make([]byte, 8)
	if _, err := d.r.ReadAt(p, 0); err != nil {
		return nil, err
	}
	// Bytes 0-3 select the byte order for the rest of the file.
	switch string(p[0:4]) {
	case leHeader:
		d.byteOrder = binary.LittleEndian
	case beHeader:
		d.byteOrder = binary.BigEndian
	default:
		return nil, FormatError("malformed header")
	}
	// Bytes 4-7 hold the file offset of the first IFD.
	ifdOffset := int64(d.byteOrder.Uint32(p[4:8]))
	// The first two bytes contain the number of entries (12 bytes each).
	if _, err := d.r.ReadAt(p[0:2], ifdOffset); err != nil {
		return nil, err
	}
	numItems := int(d.byteOrder.Uint16(p[0:2]))
	// All IFD entries are read in one chunk.
	p = make([]byte, ifdLen*numItems)
	if _, err := d.r.ReadAt(p, ifdOffset+2); err != nil {
		return nil, err
	}
	prevTag := -1
	for i := 0; i < len(p); i += ifdLen {
		tag, err := d.parseIFD(p[i : i+ifdLen])
		if err != nil {
			return nil, err
		}
		// The spec requires strictly ascending tag order; this also
		// rejects duplicate tags.
		if tag <= prevTag {
			return nil, FormatError("tags are not sorted in ascending order")
		}
		prevTag = tag
	}
	d.config.Width = int(d.firstVal(tImageWidth))
	d.config.Height = int(d.firstVal(tImageLength))
	if _, ok := d.features[tBitsPerSample]; !ok {
		// Default is 1 per specification.
		d.features[tBitsPerSample] = []uint{1}
	}
	d.bpp = d.firstVal(tBitsPerSample)
	switch d.bpp {
	case 0:
		return nil, FormatError("BitsPerSample must not be 0")
	case 1, 8, 16:
		// Nothing to do, these are accepted by this implementation.
	default:
		return nil, UnsupportedError(fmt.Sprintf("BitsPerSample of %v", d.bpp))
	}
	// Determine the image mode.
	switch d.firstVal(tPhotometricInterpretation) {
	case pRGB:
		// Every sample must have the same width as the first one.
		if d.bpp == 16 {
			for _, b := range d.features[tBitsPerSample] {
				if b != 16 {
					return nil, FormatError("wrong number of samples for 16bit RGB")
				}
			}
		} else {
			for _, b := range d.features[tBitsPerSample] {
				if b != 8 {
					return nil, FormatError("wrong number of samples for 8bit RGB")
				}
			}
		}
		// RGB images normally have 3 samples per pixel.
		// If there are more, ExtraSamples (p. 31-32 of the spec)
		// gives their meaning (usually an alpha channel).
		//
		// This implementation does not support extra samples
		// of an unspecified type.
		switch len(d.features[tBitsPerSample]) {
		case 3:
			d.mode = mRGB
			if d.bpp == 16 {
				d.config.ColorModel = color.RGBA64Model
			} else {
				d.config.ColorModel = color.RGBAModel
			}
		case 4:
			switch d.firstVal(tExtraSamples) {
			case 1:
				// Associated (premultiplied) alpha.
				d.mode = mRGBA
				if d.bpp == 16 {
					d.config.ColorModel = color.RGBA64Model
				} else {
					d.config.ColorModel = color.RGBAModel
				}
			case 2:
				// Unassociated alpha.
				d.mode = mNRGBA
				if d.bpp == 16 {
					d.config.ColorModel = color.NRGBA64Model
				} else {
					d.config.ColorModel = color.NRGBAModel
				}
			default:
				return nil, FormatError("wrong number of samples for RGB")
			}
		default:
			return nil, FormatError("wrong number of samples for RGB")
		}
	case pPaletted:
		d.mode = mPaletted
		d.config.ColorModel = color.Palette(d.palette)
	case pWhiteIsZero:
		d.mode = mGrayInvert
		if d.bpp == 16 {
			d.config.ColorModel = color.Gray16Model
		} else {
			d.config.ColorModel = color.GrayModel
		}
	case pBlackIsZero:
		d.mode = mGray
		if d.bpp == 16 {
			d.config.ColorModel = color.Gray16Model
		} else {
			d.config.ColorModel = color.GrayModel
		}
	case pCMYK:
		d.mode = mCMYK
		if d.bpp == 16 {
			return nil, UnsupportedError(fmt.Sprintf("CMYK BitsPerSample of %v", d.bpp))
		}
		d.config.ColorModel = color.CMYKModel
	default:
		return nil, UnsupportedError("color model")
	}
	return d, nil
}
// DecodeConfig returns the color model and dimensions of a TIFF image without
// decoding the entire image.
func DecodeConfig(r io.Reader) (image.Config, error) {
	var cfg image.Config
	d, err := newDecoder(r)
	if err == nil {
		cfg = d.config
	}
	return cfg, err
}
// ccittFillOrder maps the TIFF FillOrder value to the bit order used by
// the ccitt reader: 2 selects least-significant-bit-first, anything else
// (including the default, 1) selects most-significant-bit-first.
func ccittFillOrder(tiffFillOrder uint) ccitt.Order {
	switch tiffFillOrder {
	case 2:
		return ccitt.LSB
	default:
		return ccitt.MSB
	}
}
// Decode reads a TIFF image from r and returns it as an image.Image.
// The type of Image returned depends on the contents of the TIFF.
func Decode(r io.Reader) (img image.Image, err error) {
	d, err := newDecoder(r)
	if err != nil {
		return
	}
	// The pixel data is stored either as a grid of tiles or as a sequence
	// of horizontal strips; both are treated as generic "blocks" below.
	blockPadding := false
	blockWidth := d.config.Width
	blockHeight := d.config.Height
	blocksAcross := 1
	blocksDown := 1
	if d.config.Width == 0 {
		blocksAcross = 0
	}
	if d.config.Height == 0 {
		blocksDown = 0
	}
	var blockOffsets, blockCounts []uint
	if int(d.firstVal(tTileWidth)) != 0 {
		// Tiled image: every tile is padded to the full tile size.
		blockPadding = true
		blockWidth = int(d.firstVal(tTileWidth))
		blockHeight = int(d.firstVal(tTileLength))
		if blockWidth != 0 {
			blocksAcross = (d.config.Width + blockWidth - 1) / blockWidth
		}
		if blockHeight != 0 {
			blocksDown = (d.config.Height + blockHeight - 1) / blockHeight
		}
		blockCounts = d.features[tTileByteCounts]
		blockOffsets = d.features[tTileOffsets]
	} else {
		// Stripped image: RowsPerStrip rows per block; if absent, the
		// whole image is a single strip.
		if int(d.firstVal(tRowsPerStrip)) != 0 {
			blockHeight = int(d.firstVal(tRowsPerStrip))
		}
		if blockHeight != 0 {
			blocksDown = (d.config.Height + blockHeight - 1) / blockHeight
		}
		blockOffsets = d.features[tStripOffsets]
		blockCounts = d.features[tStripByteCounts]
	}
	// Check if we have the right number of strips/tiles, offsets and counts.
	if n := blocksAcross * blocksDown; len(blockOffsets) < n || len(blockCounts) < n {
		return nil, FormatError("inconsistent header")
	}
	// Allocate the destination image matching the mode chosen by newDecoder.
	imgRect := image.Rect(0, 0, d.config.Width, d.config.Height)
	switch d.mode {
	case mGray, mGrayInvert:
		if d.bpp == 16 {
			img = image.NewGray16(imgRect)
		} else {
			img = image.NewGray(imgRect)
		}
	case mPaletted:
		img = image.NewPaletted(imgRect, d.palette)
	case mNRGBA:
		if d.bpp == 16 {
			img = image.NewNRGBA64(imgRect)
		} else {
			img = image.NewNRGBA(imgRect)
		}
	case mRGB, mRGBA:
		if d.bpp == 16 {
			img = image.NewRGBA64(imgRect)
		} else {
			img = image.NewRGBA(imgRect)
		}
	case mCMYK:
		img = image.NewCMYK(imgRect)
	}
	for i := 0; i < blocksAcross; i++ {
		blkW := blockWidth
		// For strips, the last block in each direction may be smaller
		// than the nominal block size; tiles are always full-sized.
		if !blockPadding && i == blocksAcross-1 && d.config.Width%blockWidth != 0 {
			blkW = d.config.Width % blockWidth
		}
		for j := 0; j < blocksDown; j++ {
			blkH := blockHeight
			if !blockPadding && j == blocksDown-1 && d.config.Height%blockHeight != 0 {
				blkH = d.config.Height % blockHeight
			}
			offset := int64(blockOffsets[j*blocksAcross+i])
			n := int64(blockCounts[j*blocksAcross+i])
			// Decompress this block's data into d.buf.
			switch d.firstVal(tCompression) {

			// According to the spec, Compression does not have a default value,
			// but some tools interpret a missing Compression value as none so we do
			// the same.
			case cNone, 0:
				if b, ok := d.r.(*buffer); ok {
					// Fast path: slice the already-buffered data
					// instead of copying it.
					d.buf, err = b.Slice(int(offset), int(n))
				} else {
					d.buf = make([]byte, n)
					_, err = d.r.ReadAt(d.buf, offset)
				}
			case cG3:
				inv := d.firstVal(tPhotometricInterpretation) == pWhiteIsZero
				order := ccittFillOrder(d.firstVal(tFillOrder))
				r := ccitt.NewReader(io.NewSectionReader(d.r, offset, n), order, ccitt.Group3, blkW, blkH, &ccitt.Options{Invert: inv, Align: false})
				d.buf, err = ioutil.ReadAll(r)
			case cG4:
				inv := d.firstVal(tPhotometricInterpretation) == pWhiteIsZero
				order := ccittFillOrder(d.firstVal(tFillOrder))
				r := ccitt.NewReader(io.NewSectionReader(d.r, offset, n), order, ccitt.Group4, blkW, blkH, &ccitt.Options{Invert: inv, Align: false})
				d.buf, err = ioutil.ReadAll(r)
			case cLZW:
				r := lzw.NewReader(io.NewSectionReader(d.r, offset, n), true)
				d.buf, err = ioutil.ReadAll(r)
				r.Close()
			case cDeflate, cDeflateOld:
				var r io.ReadCloser
				r, err = zlib.NewReader(io.NewSectionReader(d.r, offset, n))
				if err != nil {
					return nil, err
				}
				d.buf, err = ioutil.ReadAll(r)
				r.Close()
			case cPackBits:
				d.buf, err = unpackBits(io.NewSectionReader(d.r, offset, n))
			default:
				err = UnsupportedError(fmt.Sprintf("compression value %d", d.firstVal(tCompression)))
			}
			if err != nil {
				return nil, err
			}
			xmin := i * blockWidth
			ymin := j * blockHeight
			xmax := xmin + blkW
			ymax := ymin + blkH
			err = d.decode(img, xmin, ymin, xmax, ymax)
			if err != nil {
				return nil, err
			}
		}
	}
	return
}
// init registers the TIFF format with the image package for both the
// little-endian and big-endian header variants, so that image.Decode and
// image.DecodeConfig recognize TIFF data.
func init() {
	image.RegisterFormat("tiff", leHeader, Decode, DecodeConfig)
	image.RegisterFormat("tiff", beHeader, Decode, DecodeConfig)
}

482
vendor/github.com/hhrutter/tiff/writer.go generated vendored Normal file
View File

@ -0,0 +1,482 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package tiff
import (
"bytes"
"compress/zlib"
"encoding/binary"
"fmt"
"image"
"io"
"sort"
"github.com/hhrutter/lzw"
)
// The TIFF format allows to choose the order of the different elements freely.
// The basic structure of a TIFF file written by this package is:
//
// 1. Header (8 bytes).
// 2. Image data.
// 3. Image File Directory (IFD).
// 4. "Pointer area" for larger entries in the IFD.
// We only write little-endian TIFF files.
var enc = binary.LittleEndian
// An ifdEntry is a single entry in an Image File Directory.
// A value of type dtRational is composed of two 32-bit values,
// thus data contains two uints (numerator and denominator) for a single number.
type ifdEntry struct {
	tag      int      // TIFF tag number identifying this field
	datatype int      // one of the dtXxx data-type constants
	data     []uint32 // raw values; serialized to disk by putData
}
// putData serializes e's values into p in little-endian order, using the
// on-disk width implied by e.datatype: 1 byte for BYTE/ASCII, 2 for
// SHORT, 4 for LONG/RATIONAL. p must be large enough for all values.
func (e ifdEntry) putData(p []byte) {
	off := 0
	for _, v := range e.data {
		switch e.datatype {
		case dtByte, dtASCII:
			p[off] = byte(v)
			off++
		case dtShort:
			enc.PutUint16(p[off:], uint16(v))
			off += 2
		case dtLong, dtRational:
			enc.PutUint32(p[off:], uint32(v))
			off += 4
		}
	}
}
// byTag implements sort.Interface, ordering IFD entries by ascending tag
// number as the TIFF spec requires.
type byTag []ifdEntry

func (s byTag) Len() int { return len(s) }

func (s byTag) Less(i, j int) bool {
	return s[i].tag < s[j].tag
}

func (s byTag) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}
// encodeGray writes 8-bit grayscale (or paletted index) pixel data to w,
// one row at a time. When predictor is set, each byte is replaced by its
// difference to the preceding byte in the row (horizontal differencing).
func encodeGray(w io.Writer, pix []uint8, dx, dy, stride int, predictor bool) error {
	if !predictor {
		return writePix(w, pix, dy, dx, stride)
	}
	row := make([]byte, dx)
	for y := 0; y < dy; y++ {
		var prev uint8
		for x := 0; x < dx; x++ {
			cur := pix[y*stride+x]
			row[x] = cur - prev
			prev = cur
		}
		if _, err := w.Write(row); err != nil {
			return err
		}
	}
	return nil
}
func encodeGray16(w io.Writer, pix []uint8, dx, dy, stride int, predictor bool) error {
buf := make([]byte, dx*2)
for y := 0; y < dy; y++ {
min := y*stride + 0
max := y*stride + dx*2
off := 0
var v0 uint16
for i := min; i < max; i += 2 {
// An image.Gray16's Pix is in big-endian order.
v1 := uint16(pix[i])<<8 | uint16(pix[i+1])
if predictor {
v0, v1 = v1, v1-v0
}
// We only write little-endian TIFF files.
buf[off+0] = byte(v1)
buf[off+1] = byte(v1 >> 8)
off += 2
}
if _, err := w.Write(buf); err != nil {
return err
}
}
return nil
}
// encodeRGBA writes 8-bit RGBA (or NRGBA) pixel data to w row by row,
// optionally applying per-channel horizontal differencing.
func encodeRGBA(w io.Writer, pix []uint8, dx, dy, stride int, predictor bool) error {
	if !predictor {
		return writePix(w, pix, dy, dx*4, stride)
	}
	row := make([]byte, dx*4)
	for y := 0; y < dy; y++ {
		var pr, pg, pb, pa uint8
		for x := 0; x < dx; x++ {
			i := y*stride + 4*x
			r, g, b, a := pix[i], pix[i+1], pix[i+2], pix[i+3]
			o := 4 * x
			row[o], row[o+1], row[o+2], row[o+3] = r-pr, g-pg, b-pb, a-pa
			pr, pg, pb, pa = r, g, b, a
		}
		if _, err := w.Write(row); err != nil {
			return err
		}
	}
	return nil
}
func encodeRGBA64(w io.Writer, pix []uint8, dx, dy, stride int, predictor bool) error {
buf := make([]byte, dx*8)
for y := 0; y < dy; y++ {
min := y*stride + 0
max := y*stride + dx*8
off := 0
var r0, g0, b0, a0 uint16
for i := min; i < max; i += 8 {
// An image.RGBA64's Pix is in big-endian order.
r1 := uint16(pix[i+0])<<8 | uint16(pix[i+1])
g1 := uint16(pix[i+2])<<8 | uint16(pix[i+3])
b1 := uint16(pix[i+4])<<8 | uint16(pix[i+5])
a1 := uint16(pix[i+6])<<8 | uint16(pix[i+7])
if predictor {
r0, r1 = r1, r1-r0
g0, g1 = g1, g1-g0
b0, b1 = b1, b1-b0
a0, a1 = a1, a1-a0
}
// We only write little-endian TIFF files.
buf[off+0] = byte(r1)
buf[off+1] = byte(r1 >> 8)
buf[off+2] = byte(g1)
buf[off+3] = byte(g1 >> 8)
buf[off+4] = byte(b1)
buf[off+5] = byte(b1 >> 8)
buf[off+6] = byte(a1)
buf[off+7] = byte(a1 >> 8)
off += 8
}
if _, err := w.Write(buf); err != nil {
return err
}
}
return nil
}
// encodeCMYK writes 8-bit CMYK pixel data to w row by row, optionally
// applying per-channel horizontal differencing.
func encodeCMYK(w io.Writer, pix []uint8, dx, dy, stride int, predictor bool) error {
	if !predictor {
		return writePix(w, pix, dy, dx*4, stride)
	}
	row := make([]byte, dx*4)
	for y := 0; y < dy; y++ {
		var pc, pm, py, pk uint8
		for x := 0; x < dx; x++ {
			i := y*stride + 4*x
			c, m, yl, k := pix[i], pix[i+1], pix[i+2], pix[i+3]
			o := 4 * x
			row[o], row[o+1], row[o+2], row[o+3] = c-pc, m-pm, yl-py, k-pk
			pc, pm, py, pk = c, m, yl, k
		}
		if _, err := w.Write(row); err != nil {
			return err
		}
	}
	return nil
}
func encode(w io.Writer, m image.Image, predictor bool) error {
bounds := m.Bounds()
buf := make([]byte, 4*bounds.Dx())
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
off := 0
if predictor {
var r0, g0, b0, a0 uint8
for x := bounds.Min.X; x < bounds.Max.X; x++ {
r, g, b, a := m.At(x, y).RGBA()
r1 := uint8(r >> 8)
g1 := uint8(g >> 8)
b1 := uint8(b >> 8)
a1 := uint8(a >> 8)
buf[off+0] = r1 - r0
buf[off+1] = g1 - g0
buf[off+2] = b1 - b0
buf[off+3] = a1 - a0
off += 4
r0, g0, b0, a0 = r1, g1, b1, a1
}
} else {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
r, g, b, a := m.At(x, y).RGBA()
buf[off+0] = uint8(r >> 8)
buf[off+1] = uint8(g >> 8)
buf[off+2] = uint8(b >> 8)
buf[off+3] = uint8(a >> 8)
off += 4
}
}
if _, err := w.Write(buf); err != nil {
return err
}
}
return nil
}
// writePix writes the internal byte array of an image to w. It is less general
// but much faster then encode. writePix is used when pix directly
// corresponds to one of the TIFF image types.
func writePix(w io.Writer, pix []byte, nrows, length, stride int) error {
if length == stride {
_, err := w.Write(pix[:nrows*length])
return err
}
for ; nrows > 0; nrows-- {
if _, err := w.Write(pix[:length]); err != nil {
return err
}
pix = pix[stride:]
}
return nil
}
// writeIFD writes an Image File Directory to w. ifdOffset is the file
// offset at which the IFD starts; entries whose data does not fit into
// the 4 inline value bytes are stored in a "pointer area" written right
// after the IFD, with the entry holding the data's file offset instead.
func writeIFD(w io.Writer, ifdOffset int, d []ifdEntry) error {
	var buf [ifdLen]byte
	// Make space for "pointer area" containing IFD entry data
	// longer than 4 bytes.
	parea := make([]byte, 1024)
	// pstart is the file offset of the pointer area: the entries plus the
	// 2-byte entry count and the 4-byte next-IFD offset.
	pstart := ifdOffset + ifdLen*len(d) + 6
	var o int // Current offset in parea.
	// The IFD has to be written with the tags in ascending order.
	sort.Sort(byTag(d))
	// Write the number of entries in this IFD.
	if err := binary.Write(w, enc, uint16(len(d))); err != nil {
		return err
	}
	for _, ent := range d {
		enc.PutUint16(buf[0:2], uint16(ent.tag))
		enc.PutUint16(buf[2:4], uint16(ent.datatype))
		count := uint32(len(ent.data))
		if ent.datatype == dtRational {
			// Each rational occupies two uint32s (numerator, denominator)
			// but counts as a single value.
			count /= 2
		}
		enc.PutUint32(buf[4:8], count)
		datalen := int(count * lengths[ent.datatype])
		if datalen <= 4 {
			// Small values are stored inline in the entry itself.
			ent.putData(buf[8:12])
		} else {
			// Grow the pointer area in 1024-byte steps if needed.
			if (o + datalen) > len(parea) {
				newlen := len(parea) + 1024
				for (o + datalen) > newlen {
					newlen += 1024
				}
				newarea := make([]byte, newlen)
				copy(newarea, parea)
				parea = newarea
			}
			ent.putData(parea[o : o+datalen])
			enc.PutUint32(buf[8:12], uint32(pstart+o))
			o += datalen
		}
		if _, err := w.Write(buf[:]); err != nil {
			return err
		}
	}
	// The IFD ends with the offset of the next IFD in the file,
	// or zero if it is the last one (page 14).
	if err := binary.Write(w, enc, uint32(0)); err != nil {
		return err
	}
	_, err := w.Write(parea[:o])
	return err
}
// Options are the encoding parameters.
type Options struct {
	// Compression is the type of compression used.
	Compression CompressionType
	// Predictor determines whether a differencing predictor is used;
	// if true, instead of each pixel's color, the color difference to the
	// preceding one is saved. This improves the compression for certain
	// types of images and compressors. For example, it works well for
	// photos with Deflate compression.
	//
	// Prediction is only applied for LZW and Deflate compression; see
	// Encode for details.
	Predictor bool
}
// Encode writes the image m to w. opt determines the options used for
// encoding, such as the compression type. If opt is nil, an uncompressed
// image is written.
func Encode(w io.Writer, m image.Image, opt *Options) error {
	d := m.Bounds().Size()

	compression := uint32(cNone)
	predictor := false
	if opt != nil {
		compression = opt.Compression.specValue()
		// The TIFF 6.0 spec (June,1992) says the predictor field is only to be used with LZW. (See page 64).
		// Yet this TIFF writer also allows prediction for Deflate compression.
		// This makes sense as Deflate is supposedly the successor to LZW.
		// Also both PNG and PDF use Deflate with predictors.
		//
		// The parentheses matter: "&&" binds tighter than "||", so the
		// unparenthesized form enabled the predictor for every Deflate
		// image even when opt.Predictor was false.
		predictor = opt.Predictor && (compression == cLZW || compression == cDeflate)
	}

	_, err := io.WriteString(w, leHeader)
	if err != nil {
		return err
	}

	// Compressed data is written into a buffer first, so that we
	// know the compressed size.
	var buf bytes.Buffer
	// dst holds the destination for the pixel data of the image --
	// either w or a writer to buf.
	var dst io.Writer
	// imageLen is the length of the pixel data in bytes.
	// The offset of the IFD is imageLen + 8 header bytes.
	var imageLen int

	switch compression {
	case cNone:
		dst = w
		// Write IFD offset before outputting pixel data: the size is
		// known up front from the image type and dimensions.
		switch m.(type) {
		case *image.Paletted:
			imageLen = d.X * d.Y * 1
		case *image.Gray:
			imageLen = d.X * d.Y * 1
		case *image.Gray16:
			imageLen = d.X * d.Y * 2
		case *image.RGBA64:
			imageLen = d.X * d.Y * 8
		case *image.NRGBA64:
			imageLen = d.X * d.Y * 8
		case *image.CMYK:
			imageLen = d.X * d.Y * 4
		default:
			imageLen = d.X * d.Y * 4
		}
		err = binary.Write(w, enc, uint32(imageLen+8))
	case cLZW:
		dst = lzw.NewWriter(&buf, true)
	case cDeflate:
		dst = zlib.NewWriter(&buf)
	default:
		err = UnsupportedError(fmt.Sprintf("compression value %d", compression))
	}
	if err != nil {
		return err
	}

	// IFD fields that depend on the concrete image type; the values below
	// are the defaults for 8-bit RGBA-like images.
	pr := uint32(prNone)
	photometricInterpretation := uint32(pRGB)
	samplesPerPixel := uint32(4)
	bitsPerSample := []uint32{8, 8, 8, 8}
	extraSamples := uint32(0)
	colorMap := []uint32{}

	if predictor {
		pr = prHorizontal
	}

	switch m := m.(type) {
	case *image.Paletted:
		photometricInterpretation = pPaletted
		samplesPerPixel = 1
		bitsPerSample = []uint32{8}
		// The TIFF ColorMap stores all reds, then all greens, then all
		// blues, each as a 16-bit value.
		colorMap = make([]uint32, 256*3)
		for i := 0; i < 256 && i < len(m.Palette); i++ {
			r, g, b, _ := m.Palette[i].RGBA()
			colorMap[i+0*256] = uint32(r)
			colorMap[i+1*256] = uint32(g)
			colorMap[i+2*256] = uint32(b)
		}
		err = encodeGray(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.Gray:
		photometricInterpretation = pBlackIsZero
		samplesPerPixel = 1
		bitsPerSample = []uint32{8}
		err = encodeGray(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.Gray16:
		photometricInterpretation = pBlackIsZero
		samplesPerPixel = 1
		bitsPerSample = []uint32{16}
		err = encodeGray16(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.NRGBA:
		extraSamples = 2 // Unassociated alpha.
		err = encodeRGBA(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.NRGBA64:
		extraSamples = 2 // Unassociated alpha.
		bitsPerSample = []uint32{16, 16, 16, 16}
		err = encodeRGBA64(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.RGBA:
		extraSamples = 1 // Associated alpha.
		err = encodeRGBA(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.RGBA64:
		extraSamples = 1 // Associated alpha.
		bitsPerSample = []uint32{16, 16, 16, 16}
		err = encodeRGBA64(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	case *image.CMYK:
		photometricInterpretation = uint32(pCMYK)
		samplesPerPixel = uint32(4)
		bitsPerSample = []uint32{8, 8, 8, 8}
		err = encodeCMYK(dst, m.Pix, d.X, d.Y, m.Stride, predictor)
	default:
		// Fall back to the slow generic path via the color interface.
		extraSamples = 1 // Associated alpha.
		err = encode(dst, m, predictor)
	}
	if err != nil {
		return err
	}

	if compression != cNone {
		// Flush the compressor, then write the IFD offset followed by the
		// now-known amount of compressed pixel data.
		if err = dst.(io.Closer).Close(); err != nil {
			return err
		}
		imageLen = buf.Len()
		if err = binary.Write(w, enc, uint32(imageLen+8)); err != nil {
			return err
		}
		if _, err = buf.WriteTo(w); err != nil {
			return err
		}
	}

	ifd := []ifdEntry{
		{tImageWidth, dtShort, []uint32{uint32(d.X)}},
		{tImageLength, dtShort, []uint32{uint32(d.Y)}},
		{tBitsPerSample, dtShort, bitsPerSample},
		{tCompression, dtShort, []uint32{compression}},
		{tPhotometricInterpretation, dtShort, []uint32{photometricInterpretation}},
		{tStripOffsets, dtLong, []uint32{8}},
		{tSamplesPerPixel, dtShort, []uint32{samplesPerPixel}},
		{tRowsPerStrip, dtShort, []uint32{uint32(d.Y)}},
		{tStripByteCounts, dtLong, []uint32{uint32(imageLen)}},
		// There is currently no support for storing the image
		// resolution, so give a bogus value of 72x72 dpi.
		{tXResolution, dtRational, []uint32{72, 1}},
		{tYResolution, dtRational, []uint32{72, 1}},
		{tResolutionUnit, dtShort, []uint32{resPerInch}},
	}
	if pr != prNone {
		ifd = append(ifd, ifdEntry{tPredictor, dtShort, []uint32{pr}})
	}
	if len(colorMap) != 0 {
		ifd = append(ifd, ifdEntry{tColorMap, dtShort, colorMap})
	}
	if extraSamples > 0 {
		ifd = append(ifd, ifdEntry{tExtraSamples, dtShort, []uint32{extraSamples}})
	}

	return writeIFD(w, imageLen+8, ifd)
}

View File

@ -1,3 +1,4 @@
libwallet/.gitignore# binary
libwallet
.build

View File

@ -1,12 +1,13 @@
package libwallet
import (
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet/addresses"
"github.com/pkg/errors"
)
// CreateAddressV1 returns a P2PKH MuunAddress from a publicKey for use in TransactionSchemeV1
@ -23,12 +24,12 @@ type coinV1 struct {
func (c *coinV1) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, _ *HDPublicKey) error {
userKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
sig, err := c.signature(index, tx, userKey)
if err != nil {
return errors.Wrapf(err, "failed to sign V1 input")
return fmt.Errorf("failed to sign V1 input: %w", err)
}
builder := txscript.NewScriptBuilder()
@ -36,7 +37,7 @@ func (c *coinV1) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, _ *
builder.AddData(userKey.PublicKey().Raw())
script, err := builder.Script()
if err != nil {
return errors.Wrapf(err, "failed to generate signing script")
return fmt.Errorf("failed to generate signing script: %w", err)
}
txInput := tx.TxIn[index]
@ -52,7 +53,7 @@ func (c *coinV1) createRedeemScript(publicKey *HDPublicKey) ([]byte, error) {
userAddress, err := btcutil.NewAddressPubKey(publicKey.Raw(), c.Network)
if err != nil {
return nil, errors.Wrapf(err, "failed to generate address for user")
return nil, fmt.Errorf("failed to generate address for user: %w", err)
}
return txscript.PayToAddrScript(userAddress.AddressPubKeyHash())
@ -62,17 +63,17 @@ func (c *coinV1) signature(index int, tx *wire.MsgTx, userKey *HDPrivateKey) ([]
redeemScript, err := c.createRedeemScript(userKey.PublicKey())
if err != nil {
return nil, errors.Wrapf(err, "failed to build reedem script for signing")
return nil, fmt.Errorf("failed to build reedem script for signing: %w", err)
}
privKey, err := userKey.key.ECPrivKey()
if err != nil {
return nil, errors.Wrapf(err, "failed to produce EC priv key for signing")
return nil, fmt.Errorf("failed to produce EC priv key for signing: %w", err)
}
sig, err := txscript.RawTxInSignature(tx, index, redeemScript, txscript.SigHashAll, privKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to sign V1 input")
return nil, fmt.Errorf("failed to sign V1 input: %w", err)
}
return sig, nil

View File

@ -1,10 +1,12 @@
package libwallet
import (
"errors"
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/txscript"
"github.com/muun/libwallet/addresses"
"github.com/pkg/errors"
"github.com/btcsuite/btcd/wire"
)
@ -24,23 +26,23 @@ type coinV2 struct {
func (c *coinV2) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, muunKey *HDPublicKey) error {
userKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
muunKey, err = muunKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
if len(c.MuunSignature) == 0 {
return errors.Errorf("muun signature must be present")
return errors.New("muun signature must be present")
}
txInput := tx.TxIn[index]
redeemScript, err := createRedeemScriptV2(userKey.PublicKey(), muunKey)
if err != nil {
return errors.Wrapf(err, "failed to build reedem script for signing")
return fmt.Errorf("failed to build reedem script for signing: %w", err)
}
sig, err := c.signature(index, tx, userKey.PublicKey(), muunKey, userKey)
@ -59,7 +61,7 @@ func (c *coinV2) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, muu
builder.AddData(redeemScript)
script, err := builder.Script()
if err != nil {
return errors.Wrapf(err, "failed to generate signing script")
return fmt.Errorf("failed to generate signing script: %w", err)
}
txInput.SignatureScript = script
@ -71,12 +73,12 @@ func (c *coinV2) FullySignInput(index int, tx *wire.MsgTx, userKey, muunKey *HDP
derivedUserKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
derivedMuunKey, err := muunKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
muunSignature, err := c.signature(index, tx, derivedUserKey.PublicKey(), derivedMuunKey.PublicKey(), derivedMuunKey)
@ -92,17 +94,17 @@ func (c *coinV2) signature(index int, tx *wire.MsgTx, userKey, muunKey *HDPublic
redeemScript, err := createRedeemScriptV2(userKey, muunKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to build reedem script for signing")
return nil, fmt.Errorf("failed to build reedem script for signing: %w", err)
}
privKey, err := signingKey.key.ECPrivKey()
if err != nil {
return nil, errors.Wrapf(err, "failed to produce EC priv key for signing")
return nil, fmt.Errorf("failed to produce EC priv key for signing: %w", err)
}
sig, err := txscript.RawTxInSignature(tx, index, redeemScript, txscript.SigHashAll, privKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to sign V2 output")
return nil, fmt.Errorf("failed to sign V2 output: %w", err)
}
return sig, nil

View File

@ -1,11 +1,12 @@
package libwallet
import (
"errors"
"fmt"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet/addresses"
"github.com/pkg/errors"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/wire"
)
@ -26,16 +27,16 @@ func (c *coinV3) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, muu
userKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
muunKey, err = muunKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
if len(c.MuunSignature) == 0 {
return errors.Errorf("muun signature must be present")
return errors.New("muun signature must be present")
}
witnessScript, err := createWitnessScriptV3(userKey.PublicKey(), muunKey)
@ -60,12 +61,12 @@ func (c *coinV3) FullySignInput(index int, tx *wire.MsgTx, userKey, muunKey *HDP
derivedUserKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
derivedMuunKey, err := muunKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
muunSignature, err := c.signature(index, tx, derivedUserKey.PublicKey(), derivedMuunKey.PublicKey(), derivedMuunKey)
@ -94,7 +95,7 @@ func (c *coinV3) signature(index int, tx *wire.MsgTx, userKey *HDPublicKey, muun
redeemScript, err := createRedeemScriptV3(userKey, muunKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to build reedem script for signing")
return nil, fmt.Errorf("failed to build reedem script for signing: %w", err)
}
return signNonNativeSegwitInput(

View File

@ -1,11 +1,11 @@
package libwallet
import (
"fmt"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet/addresses"
"github.com/pkg/errors"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/wire"
)
@ -27,16 +27,16 @@ func (c *coinV4) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, muu
userKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
muunKey, err = muunKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
if len(c.MuunSignature) == 0 {
return errors.Errorf("muun signature must be present")
return fmt.Errorf("muun signature must be present: %w", err)
}
witnessScript, err := createWitnessScriptV4(userKey.PublicKey(), muunKey)
@ -61,12 +61,12 @@ func (c *coinV4) FullySignInput(index int, tx *wire.MsgTx, userKey, muunKey *HDP
derivedUserKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
derivedMuunKey, err := muunKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
muunSignature, err := c.signature(index, tx, derivedUserKey.PublicKey(), derivedMuunKey.PublicKey(), derivedMuunKey)

View File

@ -1,6 +1,7 @@
package libwallet
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
@ -8,10 +9,10 @@ import (
"strings"
"github.com/muun/libwallet/addresses"
"github.com/muun/libwallet/errors"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcutil"
"github.com/pkg/errors"
"google.golang.org/protobuf/proto"
)
@ -44,11 +45,11 @@ func GetPaymentURI(rawInput string, network *Network) (*MuunPaymentURI, error) {
bitcoinUri, components := buildUriFromString(rawInput, bitcoinScheme)
if components == nil {
return nil, errors.Errorf("failed to parse uri %v", rawInput)
return nil, errors.Errorf(ErrInvalidURI, "failed to parse uri %v", rawInput)
}
if components.Scheme != "bitcoin" {
return nil, errors.New("Invalid scheme")
return nil, errors.New(ErrInvalidURI, "Invalid scheme")
}
base58Address := components.Opaque
@ -61,7 +62,7 @@ func GetPaymentURI(rawInput string, network *Network) (*MuunPaymentURI, error) {
queryValues, err := url.ParseQuery(components.RawQuery)
if err != nil {
return nil, errors.Wrapf(err, "Couldnt parse query")
return nil, errors.Errorf(ErrInvalidURI, "Couldn't parse query: %v", err)
}
var label, message, amount string
@ -110,11 +111,11 @@ func GetPaymentURI(rawInput string, network *Network) (*MuunPaymentURI, error) {
// Bech32 check
validatedBase58Address, err := btcutil.DecodeAddress(base58Address, network.network)
if err != nil {
return nil, err
return nil, fmt.Errorf("invalid address: %w", err)
}
if !validatedBase58Address.IsForNet(network.network) {
return nil, errors.Errorf("Network mismatch")
return nil, errors.New(ErrInvalidURI, "Network mismatch")
}
return &MuunPaymentURI{
@ -131,7 +132,7 @@ func GetPaymentURI(rawInput string, network *Network) (*MuunPaymentURI, error) {
func DoPaymentRequestCall(url string, network *Network) (*MuunPaymentURI, error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, errors.Wrapf(err, "Failed to create request to: %s", url)
return nil, fmt.Errorf("failed to create request to: %s", url)
}
req.Header.Set("Accept", "application/bitcoin-paymentrequest")
@ -139,35 +140,35 @@ func DoPaymentRequestCall(url string, network *Network) (*MuunPaymentURI, error)
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return nil, errors.Wrapf(err, "Failed to make request to: %s", url)
return nil, errors.Errorf(ErrNetwork, "failed to make request to: %s", url)
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, errors.Wrapf(err, "Failed to read body response")
return nil, errors.Errorf(ErrNetwork, "Failed to read body response: %w", err)
}
payReq := &PaymentRequest{}
err = proto.Unmarshal(body, payReq)
if err != nil {
return nil, errors.Wrapf(err, "Failed to Unmarshall paymentRequest")
return nil, fmt.Errorf("failed to unmarshal payment request: %w", err)
}
payDetails := &PaymentDetails{}
err = proto.Unmarshal(payReq.SerializedPaymentDetails, payDetails)
if err != nil {
return nil, errors.Wrapf(err, "Failed to Unmarshall paymentDetails")
return nil, fmt.Errorf("failed to unmarshall payment details: %w", err)
}
if len(payDetails.Outputs) == 0 {
return nil, errors.New("No outputs provided")
return nil, fmt.Errorf("no outputs provided")
}
address, err := getAddressFromScript(payDetails.Outputs[0].Script, network)
if err != nil {
return nil, errors.Wrapf(err, "Failed to get address")
return nil, fmt.Errorf("failed to get address: %w", err)
}
return &MuunPaymentURI{

View File

@ -1,6 +1,8 @@
package addresses
import (
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcutil"
@ -12,12 +14,12 @@ func CreateAddressV2(userKey, muunKey *hdkeychain.ExtendedKey, path string, netw
script, err := CreateRedeemScriptV2(userKey, muunKey, network)
if err != nil {
return nil, errors.Wrapf(err, "failed to generate redeem script v2")
return nil, fmt.Errorf("failed to generate redeem script v2: %w", err)
}
address, err := btcutil.NewAddressScriptHash(script, network)
if err != nil {
return nil, errors.Wrapf(err, "failed to generate multisig address")
return nil, fmt.Errorf("failed to generate multisig address: %w", err)
}
return &WalletAddress{

View File

@ -2,13 +2,13 @@ package addresses
import (
"crypto/sha256"
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcutil"
"github.com/btcsuite/btcutil/hdkeychain"
"github.com/pkg/errors"
)
func CreateAddressV3(userKey, muunKey *hdkeychain.ExtendedKey, path string, network *chaincfg.Params) (*WalletAddress, error) {
@ -33,7 +33,7 @@ func CreateAddressV3(userKey, muunKey *hdkeychain.ExtendedKey, path string, netw
func CreateRedeemScriptV3(userKey, muunKey *hdkeychain.ExtendedKey, network *chaincfg.Params) ([]byte, error) {
witnessScript, err := CreateWitnessScriptV3(userKey, muunKey, network)
if err != nil {
return nil, errors.Wrapf(err, "failed to generate redeem script v3")
return nil, fmt.Errorf("failed to generate redeem script v3: %w", err)
}
return createNonNativeSegwitRedeemScript(witnessScript)

View File

@ -2,11 +2,11 @@ package addresses
import (
"crypto/sha256"
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcutil"
"github.com/btcsuite/btcutil/hdkeychain"
"github.com/pkg/errors"
)
// CreateAddressV4 returns a P2WSH WalletAddress from a user HD-pubkey and a Muun co-signing HD-pubkey.
@ -14,7 +14,7 @@ func CreateAddressV4(userKey, muunKey *hdkeychain.ExtendedKey, path string, netw
witnessScript, err := CreateWitnessScriptV4(userKey, muunKey, network)
if err != nil {
return nil, errors.Wrapf(err, "failed to generate witness script v4")
return nil, fmt.Errorf("failed to generate witness script v4: %w", err)
}
witnessScript256 := sha256.Sum256(witnessScript)

View File

@ -4,8 +4,7 @@ import (
"bytes"
"crypto/aes"
"crypto/cipher"
"github.com/pkg/errors"
"errors"
)
const KeySize = 32

View File

@ -5,16 +5,33 @@ import (
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcutil/base58"
"github.com/pkg/errors"
)
const (
// EncodedKeyLength is the size of a modern encoded key, as exported by the clients.
EncodedKeyLength = 147
// EncodedKeyLengthLegacy is the size of a legacy key, when salt resided only in the 2nd key.
EncodedKeyLengthLegacy = 136
)
// ChallengePrivateKey wraps a secp256k1 private key used to sign payloads
// (see SignSha) and to decrypt exported encrypted keys (see DecryptKey).
type ChallengePrivateKey struct {
	key *btcec.PrivateKey
}
// encryptedPrivateKey is the decoded, structured form of an exported encrypted
// key, as parsed from its base58 serialization by decodeEncryptedPrivateKey.
type encryptedPrivateKey struct {
	Version      uint8
	Birthday     uint16
	EphPublicKey []byte // 33-byte compressed public-key
	CipherText   []byte // 64-byte encrypted text
	Salt         []byte // (optional) 8-byte salt; zero-filled for legacy keys without one
}
type DecryptedPrivateKey struct {
Key *HDPrivateKey
Birthday int
@ -37,7 +54,7 @@ func (k *ChallengePrivateKey) SignSha(payload []byte) ([]byte, error) {
sig, err := k.key.Sign(hash[:])
if err != nil {
return nil, errors.Wrapf(err, "failed to sign payload")
return nil, fmt.Errorf("failed to sign payload: %w", err)
}
return sig.Serialize(), nil
@ -53,43 +70,12 @@ func (k *ChallengePrivateKey) PubKey() *ChallengePublicKey {
}
func (k *ChallengePrivateKey) DecryptKey(encryptedKey string, network *Network) (*DecryptedPrivateKey, error) {
reader := bytes.NewReader(base58.Decode(encryptedKey))
version, err := reader.ReadByte()
decoded, err := decodeEncryptedPrivateKey(encryptedKey)
if err != nil {
return nil, errors.Wrapf(err, "decrypting key")
}
if version != 2 {
return nil, errors.Errorf("decrypting key: found key version %v, expected 2", version)
return nil, err
}
birthdayBytes := make([]byte, 2)
rawPubEph := make([]byte, serializedPublicKeyLength)
ciphertext := make([]byte, 64)
recoveryCodeSalt := make([]byte, 8)
n, err := reader.Read(birthdayBytes)
if err != nil || n != 2 {
return nil, errors.Errorf("decrypting key: failed to read birthday")
}
birthday := binary.BigEndian.Uint16(birthdayBytes)
n, err = reader.Read(rawPubEph)
if err != nil || n != serializedPublicKeyLength {
return nil, errors.Errorf("decrypting key: failed to read pubeph")
}
n, err = reader.Read(ciphertext)
if err != nil || n != 64 {
return nil, errors.Errorf("decrypting key: failed to read ciphertext")
}
n, err = reader.Read(recoveryCodeSalt)
if err != nil || n != 8 {
return nil, errors.Errorf("decrypting key: failed to read recoveryCodeSalt")
}
plaintext, err := decryptWithPrivKey(k.key, rawPubEph, ciphertext)
plaintext, err := decryptWithPrivKey(k.key, decoded.EphPublicKey, decoded.CipherText)
if err != nil {
return nil, err
}
@ -99,11 +85,68 @@ func (k *ChallengePrivateKey) DecryptKey(encryptedKey string, network *Network)
privKey, err := NewHDPrivateKeyFromBytes(rawPrivKey, rawChainCode, network)
if err != nil {
return nil, errors.Wrapf(err, "decrypting key: failed to parse key")
return nil, fmt.Errorf("decrypting key: failed to parse key: %w", err)
}
return &DecryptedPrivateKey{
privKey,
int(birthday),
int(decoded.Birthday),
}, nil
}
// decodeEncryptedPrivateKey parses the base58 serialization of an exported
// encrypted key into its parts: version byte, birthday, ephemeral public key,
// ciphertext and (when present) the recovery-code salt.
//
// Only version 2 keys are accepted. Very old keys were exported without the
// salt in the first of the two keys; for those, a zero-filled salt is returned
// (see shouldHaveSalt).
func decodeEncryptedPrivateKey(encodedKey string) (*encryptedPrivateKey, error) {
	reader := bytes.NewReader(base58.Decode(encodedKey))

	version, err := reader.ReadByte()
	if err != nil {
		return nil, fmt.Errorf("decrypting key: %w", err)
	}
	if version != 2 {
		return nil, fmt.Errorf("decrypting key: found key version %v, expected 2", version)
	}

	// readChunk pulls exactly size bytes from the reader, or fails with a
	// message naming the field being read.
	readChunk := func(size int, name string) ([]byte, error) {
		buf := make([]byte, size)
		n, err := reader.Read(buf)
		if err != nil || n != size {
			return nil, errors.New("decrypting key: failed to read " + name)
		}
		return buf, nil
	}

	birthdayBytes, err := readChunk(2, "birthday")
	if err != nil {
		return nil, err
	}

	rawPubEph, err := readChunk(serializedPublicKeyLength, "pubeph")
	if err != nil {
		return nil, err
	}

	ciphertext, err := readChunk(64, "ciphertext")
	if err != nil {
		return nil, err
	}

	// NOTE:
	// The very, very old format for encrypted keys didn't contain the encryption
	// salt in the first of the two keys. This is a valid scenario, and a
	// zero-filled salt can be returned.
	recoveryCodeSalt := make([]byte, 8)
	if shouldHaveSalt(encodedKey) {
		recoveryCodeSalt, err = readChunk(8, "recoveryCodeSalt")
		if err != nil {
			return nil, err
		}
	}

	return &encryptedPrivateKey{
		Version:      version,
		Birthday:     binary.BigEndian.Uint16(birthdayBytes),
		EphPublicKey: rawPubEph,
		CipherText:   ciphertext,
		Salt:         recoveryCodeSalt,
	}, nil
}
// shouldHaveSalt reports whether an encoded key is long enough to carry the
// optional 8-byte recovery-code salt. Legacy keys (EncodedKeyLengthLegacy
// characters or fewer) predate the salt and don't include it.
func shouldHaveSalt(encodedKey string) bool {
	// Not military-grade logic, but works for now.
	return len(encodedKey) > EncodedKeyLengthLegacy
}

View File

@ -3,10 +3,10 @@ package libwallet
import (
"bytes"
"encoding/binary"
"fmt"
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcutil/base58"
"github.com/pkg/errors"
)
type ChallengePublicKey struct {
@ -37,7 +37,7 @@ func (k *ChallengePublicKey) EncryptKey(privKey *HDPrivateKey, recoveryCodeSalt
plaintext = append(plaintext, rawHDKey[privKeyStart:privKeyStart+privKeyLength]...)
plaintext = append(plaintext, rawHDKey[chainCodeStart:chainCodeStart+chainCodeLength]...)
if len(plaintext) != 64 {
return "", errors.Errorf("failed to encrypt key: expected payload of 64 bytes, found %v", len(plaintext))
return "", fmt.Errorf("failed to encrypt key: expected payload of 64 bytes, found %v", len(plaintext))
}
pubEph, ciphertext, err := encryptWithPubKey(k.pubKey, plaintext)

View File

@ -1,32 +1,137 @@
package libwallet
import (
"encoding/hex"
"encoding/json"
"fmt"
"github.com/muun/libwallet/emergencykit"
)
// EKInput input struct to fill the PDF
// EKInput is the caller-provided input used to render the Emergency Kit PDF:
// both encrypted private keys (base58, as exported by the clients) plus their
// corresponding fingerprints, used to build the output descriptors.
type EKInput struct {
	FirstEncryptedKey  string
	FirstFingerprint   string
	SecondEncryptedKey string
	SecondFingerprint  string
}
// EKOutput with the html as string and the verification code
// EKOutput carries the rendered Emergency Kit: the translated HTML, the
// verification code displayed to the user, and the kit metadata serialized as
// an opaque string (later passed back to AddEmergencyKitMetadata).
type EKOutput struct {
	HTML             string
	VerificationCode string
	Metadata         string
}
// GenerateEmergencyKitHTML returns the translated html as a string along with the verification code
// GenerateEmergencyKitHTML returns the translated html as a string along with the verification
// code and the kit metadata, represented in an opaque string.
// After calling this method, clients should use their Chromium/WebKit implementations to render
// the HTML into a PDF (better done there), and then come back to call `AddEmergencyKitMetadata`
// and produce the final PDF (better done here).
func GenerateEmergencyKitHTML(ekParams *EKInput, language string) (*EKOutput, error) {
out, err := emergencykit.GenerateHTML(&emergencykit.Input{
moduleInput := &emergencykit.Input{
FirstEncryptedKey: ekParams.FirstEncryptedKey,
FirstFingerprint: ekParams.FirstFingerprint,
SecondEncryptedKey: ekParams.SecondEncryptedKey,
}, language)
if err != nil {
return nil, err
SecondFingerprint: ekParams.SecondFingerprint,
}
// Create the HTML and the verification code:
htmlWithCode, err := emergencykit.GenerateHTML(moduleInput, language)
if err != nil {
return nil, fmt.Errorf("GenerateEkHtml failed to render: %w", err)
}
// Create and serialize the metadata:
metadata, err := createEmergencyKitMetadata(ekParams)
if err != nil {
return nil, fmt.Errorf("GenerateEkHtml failed to create metadata: %w", err)
}
metadataBytes, err := json.Marshal(&metadata)
if err != nil {
return nil, fmt.Errorf("GenerateEkHtml failed to marshal %s: %w", string(metadataBytes), err)
}
output := &EKOutput{
HTML: htmlWithCode.HTML,
VerificationCode: htmlWithCode.VerificationCode,
Metadata: string(metadataBytes),
}
return output, nil
}
// AddEmergencyKitMetadata produces a copy of the PDF file at `srcFile` with embedded metadata,
// writing it into `dstFile`. The provided metadata must be the same opaque string produced by
// `GenerateEmergencyKitHTML`.
// AddEmergencyKitMetadata produces a copy of the PDF file at `srcFile` with embedded metadata,
// writing it into `dstFile`. The provided metadata must be the same opaque string produced by
// `GenerateEmergencyKitHTML`.
func AddEmergencyKitMetadata(metadataText string, srcFile string, dstFile string) error {
	// Deserialize the opaque metadata string back into its structured form:
	var metadata emergencykit.Metadata
	if err := json.Unmarshal([]byte(metadataText), &metadata); err != nil {
		return fmt.Errorf("AddEkMetadata failed to unmarshal: %w", err)
	}

	// Embed it into a copy of the source PDF:
	writer := &emergencykit.MetadataWriter{
		SrcFile: srcFile,
		DstFile: dstFile,
	}
	if err := writer.WriteMetadata(&metadata); err != nil {
		return fmt.Errorf("AddEkMetadata failed to write metadata: %w", err)
	}

	return nil
}
// createEmergencyKitMetadata builds the kit metadata from the caller's input,
// decoding both encrypted keys and attaching the checksumed output descriptors.
//
// NOTE:
// This method would be more naturally placed in the `emergencykit` module, but given the current
// project structure (heavily determined by `gomobile` and the need for top-level bindings) and
// the use of `decodeEncryptedPrivateKey` this isn't possible. Instead, we peek through the layer
// boundary to craft the object here.
func createEmergencyKitMetadata(ekParams *EKInput) (*emergencykit.Metadata, error) {
	// Decode both keys, to extract their inner properties:
	firstKey, err := decodeEncryptedPrivateKey(ekParams.FirstEncryptedKey)
	if err != nil {
		return nil, fmt.Errorf("createEkMetadata failed to decode first key: %w", err)
	}

	secondKey, err := decodeEncryptedPrivateKey(ekParams.SecondEncryptedKey)
	if err != nil {
		return nil, fmt.Errorf("createEkMetadata failed to decode second key: %w", err)
	}

	// Obtain the list of checksumed output descriptors:
	descriptors := emergencykit.GetDescriptors(&emergencykit.DescriptorsData{
		FirstFingerprint:  ekParams.FirstFingerprint,
		SecondFingerprint: ekParams.SecondFingerprint,
	})

	// Assemble the final object, using the second key's birthday (as the original did):
	return &emergencykit.Metadata{
		Version:       2,
		BirthdayBlock: int(secondKey.Birthday),
		EncryptedKeys: []*emergencykit.MetadataKey{
			createEmergencyKitMetadataKey(firstKey),
			createEmergencyKitMetadataKey(secondKey),
		},
		OutputDescriptors: descriptors,
	}, nil
}
// createEmergencyKitMetadataKey maps a decoded encrypted key to its metadata
// representation, hex-encoding each binary field.
//
// NOTE(review): this block carried unreachable residue after its return (an old
// `return &EKOutput{...}` tail and a stray closing brace, left over from a diff
// rendering); the residue was removed.
func createEmergencyKitMetadataKey(key *encryptedPrivateKey) *emergencykit.MetadataKey {
	return &emergencykit.MetadataKey{
		DhPubKey:         hex.EncodeToString(key.EphPublicKey),
		EncryptedPrivKey: hex.EncodeToString(key.CipherText),
		Salt:             hex.EncodeToString(key.Salt),
	}
}

View File

@ -2,7 +2,6 @@ package emergencykit
type pageData struct {
Css string
Logo string
Content string
}
@ -11,17 +10,11 @@ type contentData struct {
SecondEncryptedKey string
VerificationCode string
CurrentDate string
Descriptors string
IconHelp string
IconPadlock string
}
const logo = `
<svg class="logo" width="65" height="12" viewBox="0 0 65 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.89661 11.8674V3.24807C2.89661 3.24807 3.72365 2.86307 5.30403 2.86307C6.88441 2.86307 7.58862 3.41307 7.58862 4.96093V11.8674H9.85684V4.96093C9.85684 4.26164 9.85684 3.72736 9.84865 3.31878C10.2089 3.13021 10.9132 2.86307 12.0759 2.86307C13.8774 2.86307 14.5489 3.41307 14.5489 4.96093V11.8674H16.9972V4.96093C16.9972 3.91593 16.7679 2.82378 15.9491 2.03021C15.1221 1.23664 13.9429 0.710205 12.1005 0.710205C10.4464 0.710205 8.98886 1.25235 8.70226 1.37021C7.77696 0.953777 6.47499 0.710205 5.32041 0.710205C3.07675 0.710205 0.456431 1.39378 0.456431 1.39378V11.8674H2.89661Z" fill="#2474CD"/>
<path d="M32.8274 0.710205V11.1838C32.8274 11.1838 30.2309 11.8674 27.7067 11.8674C25.8819 11.8674 24.4268 11.3331 23.6149 10.5474C22.803 9.7538 22.5859 8.66166 22.5859 7.61665V0.710205H24.9895V7.6088C24.9895 9.15666 25.9542 9.70666 27.7227 9.70666C29.4913 9.70666 30.4318 9.32166 30.4318 9.32166V0.710205H32.8274Z" fill="#2474CD"/>
<path d="M48.9016 0.710205V11.1838C48.9016 11.1838 46.3051 11.8674 43.7809 11.8674C41.9561 11.8674 40.501 11.3331 39.6891 10.5474C38.8772 9.7538 38.6602 8.66166 38.6602 7.61665V0.710205H41.0638V7.6088C41.0638 9.15666 42.0284 9.70666 43.797 9.70666C45.5655 9.70666 46.506 9.32166 46.506 9.32166V0.710205H48.9016Z" fill="#2474CD"/>
<path d="M54.7012 11.8674V1.39378C54.7012 1.39378 57.2977 0.710205 59.8219 0.710205C61.6467 0.710205 63.1017 1.24449 63.9137 2.03021C64.7256 2.82378 64.9426 3.91593 64.9426 4.96093V11.8674H62.539V4.96879C62.539 3.42093 61.5744 2.87093 59.8058 2.87093C58.0373 2.87093 57.0967 3.25593 57.0967 3.25593V11.8674H54.7012Z" fill="#2474CD"/>
</svg>
`
const page = `
<!DOCTYPE html>
<html lang="en">
@ -36,7 +29,6 @@ const page = `
</head>
<body>
{{.Logo}}
{{.Content}}
</body>
</html>
@ -44,225 +36,322 @@ const page = `
const contentEN = `
<header>
<div class="title">
<h1>Emergency Kit</h1>
<date>Created on {{.CurrentDate}}</date>
</div>
<div class="verification-code">
<h4>Verification code</h4>
<code>{{.VerificationCode}}</code>
</div>
<h1>Emergency Kit</h1>
<h2>Verification <span class="verification-code">#{{.VerificationCode}}</span></h2>
</header>
<section>
<h2>About this document</h2>
<p>Here you'll find the encrypted information you need to transfer your money out of your Muun wallet without
requiring collaboration from anyone, including Muun's own software and servers.</p>
<p>This includes all your private keys (securely encrypted with your Recovery Code) and some additional data related
to your wallet.</p>
<p>With this document and your recovery code at hand, you have complete ownership of your money. Nobody else has
all the pieces. This is why Bitcoin was created: to give people full control of the money they rightly own.</p>
</section>
<section>
<h2>Recovering your money</h2>
<p>To move forward with the transfer of funds, we recommend using our
<a href="https://github.com/muun/recovery">open-source Recovery Tool</a>. It's available for the whole world to
download and examine, and it will always be.</p>
<p>We created it to assist you with the process, but nothing stops you from doing it manually if you're so
inclined.</p>
<p>Go to <strong>github.com/muun/recovery</strong> and follow the instructions to easily transfer your money to a Bitcoin
address of your choosing.</p>
</section>
<section>
<h2>Recovery information</h2>
<p>This is what you'll need for the transfer, plus your recovery code. If these random-seeming codes look daunting,
don't worry: the <a href="https://github.com/muun/recovery">recovery tool</a> will take care of everything.</p>
</section>
<section>
<h3>First Encrypted Private Key</h3>
<div class="data">{{.FirstEncryptedKey}}</div>
<h3>Second Encrypted Private Key</h3>
<div class="data">{{.SecondEncryptedKey}}</div>
<h3>Output descriptors</h3>
<div class="data">
sh(wsh(multi(2, <span class="key-placeholder">first key</span>/1'/1'/0/*, <span class="key-placeholder">second key</span>/1'/1'/0/*)))<br>
sh(wsh(multi(2, <span class="key-placeholder">first key</span>/1'/1'/1/*, <span class="key-placeholder">second key</span>/1'/1'/1/*)))<br>
sh(wsh(multi(2, <span class="key-placeholder">first key</span>/1'/1'/2/*/*, <span class="key-placeholder">second key</span>/1'/1'/2/*/*)))<br>
wsh(multi(2, <span class="key-placeholder">first key</span>/1'/1'/0/*, <span class="key-placeholder">second key</span>/1'/1'/0/*))<br>
wsh(multi(2, <span class="key-placeholder">first key</span>/1'/1'/1/*, <span class="key-placeholder">second key</span>/1'/1'/1/*))<br>
wsh(multi(2, <span class="key-placeholder">first key</span>/1'/1'/2/*/*, <span class="key-placeholder">second key</span>/1'/1'/2/*/*))
<div class="backup">
<div class="intro">
{{.IconPadlock}}
<div class="text">
<h1>Encrypted backup</h1>
<h2>It can only be decrypted using your <strong>Recovery Code</strong>.</h2>
</div>
</div>
<div class="keys">
<div class="key">
<h3>First key</h3>
<p>{{.FirstEncryptedKey}}</p>
</div>
<div class="key">
<h3>Second key</h3>
<p>{{.SecondEncryptedKey}}</p>
</div>
<div class="date">
Created on <date>{{.CurrentDate}}</date>
</div>
</div>
</div>
<section class="instructions">
<h1>Instructions</h1>
<p>This emergency procedure will help you recover your funds if you are unable to use Muun on your phone.</p>
<div class="item">
<div class="number-box">
<div class="number">1</div>
</div>
<div class="text-box">
<h3>Find your Recovery Code</h3>
<p>You wrote this code on paper before creating your Emergency Kit. Youll need it later.</p>
</div>
</div>
<div class="item">
<div class="number-box">
<div class="number">2</div>
</div>
<div class="text-box">
<h3>Download the Recovery Tool</h3>
<p>Go to <a href="https://github.com/muun/recovery">github.com/muun/recovery</a> and download the tool on your computer.</p>
</div>
</div>
<div class="item">
<div class="number-box">
<div class="number">3</div>
</div>
<div class="text-box">
<h3>Recover your funds</h3>
<p>Run the Recovery Tool and follow the steps. It will safely transfer your funds to a Bitcoin address that you
choose.</p>
</div>
</div>
</section>
<section class="page-break-before">
<h2>Some questions you might have</h2>
<h3>Can I print this document?</h3>
<p>You can, but we recommend storing it online in a service such as Google Drive, iCloud, OneDrive or Dropbox.
These providers have earned their users' trust by being always available and safeguarding data with strong
security practices. They are also free.</p>
<p>If you decide to print it, be sure to keep it safely away from where you store your recovery code. Remember:
a person with both pieces can take control of your funds.</p>
<h3>What if I lose my emergency kit?</h3>
<p>Don't panic. Your money is not lost. It's all there, in the Bitcoin blockchain, waiting for you. Use our Android
or iOS applications and go to the Security Center to create a new kit.</p>
<h3>What if somebody sees this document?</h3>
<section class="help">
{{.IconHelp}}
<div class="text-box">
<h3>Need help?</h3>
<p>
As long as you keep your recovery code hidden, this document is harmless. All the data it contains is safely
encrypted, and only your recovery code can decrypt it to a usable form.</p>
<p>Still, we recommend that you keep it where only you can see it. If you really fear losing it or want to share it
for some other reason, only do so with people that enjoy your absolute trust.</p>
<h3>Why don't I have a mnemonic phrase?</h3>
<p>If you've been involved with Bitcoin for some time, you've probably seen mnemonics and been told to rely on them.
As of this writing, many wallets still use the technique.</p>
<p>There's nothing inherently wrong with mnemonics, but they have been rendered obsolete. The twelve words are
simply not enough to encode all the information a modern Bitcoin wallet requires to operate, and the problem will
only get worse as technology advances. Already there are improvements taking shape that would make mnemonic
recovery not only harder, but impossible.</p>
<p>For this reason, we decided to guarantee full ownership using a safer, more flexible and future-proof technique.
This way, we'll be able to keep up with technological improvements and continue to provide
state-of-the-art software.</p>
<h3>I have other questions</h3>
<p>We'll be glad to answer them. Contact us at <strong><a href="mailto:support@muun.com" >support@muun.com</a></strong>
to let us know.</p>
Contact us at <a href="mailto:support@muun.com">support@muun.com</a>. We're always there to help.
</p>
</div>
</section>
</body>
`
<section class="advanced page-break-before">
<h1>Advanced information</h1>
<h2>Output descriptors</h2>
<p>These descriptors, combined with your keys, specify how to locate your wallets funds on the Bitcoin blockchain.</p>
{{ if .Descriptors }}
{{.Descriptors}}
{{ else }}
<ul class="descriptors">
<!-- These lines are way too long, but dividing them introduces unwanted spaces -->
<li><span class="f">sh</span>(<span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">first key</span>/1'/1'/0/*, <span class="fp">second key</span>/1'/1'/0/*)))</li>
<li><span class="f">sh</span>(<span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">first key</span>/1'/1'/1/*, <span class="fp">second key</span>/1'/1'/1/*)))</li>
<li><span class="f">sh</span>(<span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">first key</span>/1'/1'/2/*/*, <span class="fp">second key</span>/1'/1'/2/*/*)))</li>
<li><span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">first key</span>/1'/1'/0/*, <span class="fp">second key</span>/1'/1'/0/*))</li>
<li><span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">first key</span>/1'/1'/1/*, <span class="fp">second key</span>/1'/1'/1/*))</li>
<li><span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">first key</span>/1'/1'/2/*/*, <span class="fp">second key</span>/1'/1'/2/*/*))</li>
</ul>
{{ end }}
<p>
Output descriptors are part of a developing standard for Recovery that Muun intends to support and is helping grow.
Since the standard is in a very early stage, the list above includes some non-standard elements.
</p>
<p>
When descriptors reach a more mature stage, youll be able to take your funds from one wallet to another with
complete independence. Muun believes this freedom is at the core of Bitcoins promise, and is working towards
that goal.
</p>
</section>
`
const contentES = `
<header>
<div class="title">
<h1>Kit de Emergencia</h1>
<date>Creado el {{.CurrentDate}}</date>
</div>
<div class="verification-code">
<h4>Código de Verificación</h4>
<code>{{.VerificationCode}}</code>
</div>
<h1>Kit de Emergencia</h1>
<h2>Verificación <span class="verification-code">#{{.VerificationCode}}</span></h2>
</header>
<section>
<h2>Sobre este documento</h2>
<p>Aquí encontrarás la información encriptada que necesitas para transferir tu dinero fuera de tu billetera Muun
sin requerir colaboración de nadie, incluso del software y los servicios de Muun.</p>
<p>Ésto incluye todas tus claves privadas (encriptadas de forma segura con tu Recovery Code) y algo de información
adicional relacionada a tu billetera.</p>
<p>Con éste documento y tu Código de Recuperación a mano, tienes posesión total de tu dinero. Nadie más tiene
todas las piezas. Bitcoin fue creado para esto: darle a la gente control total sobre el dinero que les pertenece.</p>
</section>
<section>
<h2>Recuperando tu dinero</h2>
<p>Para proceder con la transferencia de tus fondos, recomendamos usar nuestra
<a href="https://github.com/muun/recovery">Herramienta de Recuperación de código abierto</a>. Está disponible para que
todo el mundo la descargue y la examine, y siempre lo estará.</p>
<p>La creamos para asistirte en el proceso, pero nada te impide hacerlo manualmente si prefieres.</p>
<p>Entra en <strong>github.com/muun/recovery</strong> y sigue las instrucciones para transferir tu dinero a una
dirección de Bitcoin que elijas.</p>
</section>
<section>
<h2>Información de recuperación</h2>
<p>Ésto es lo que necesitas para la transferencia, además de tu Código de Recuperación. Si éstos códigos te parecen
confusos, no te preocupes: la <a href="https://github.com/muun/recovery">Herramienta de Recuperación</a> se hará cargo
de todo.</p>
</section>
<section>
<h3>Primera Clave Privada Encriptada</h3>
<div class="data">{{.FirstEncryptedKey}}</div>
<h3>Segunda Clave Privada Encriptada</h3>
<div class="data">{{.SecondEncryptedKey}}</div>
<h3>Descriptores de outputs</h3>
<div class="data">
sh(wsh(multi(2, <span class="key-placeholder">primera clave</span>/1'/1'/0/*, <span class="key-placeholder">segunda clave</span>/1'/1'/0/*)))<br>
sh(wsh(multi(2, <span class="key-placeholder">primera clave</span>/1'/1'/1/*, <span class="key-placeholder">segunda clave</span>/1'/1'/1/*)))<br>
sh(wsh(multi(2, <span class="key-placeholder">primera clave</span>/1'/1'/2/*/*, <span class="key-placeholder">segunda clave</span>/1'/1'/2/*/*)))<br>
wsh(multi(2, <span class="key-placeholder">primera clave</span>/1'/1'/0/*, <span class="key-placeholder">segunda clave</span>/1'/1'/0/*))<br>
wsh(multi(2, <span class="key-placeholder">primera clave</span>/1'/1'/1/*, <span class="key-placeholder">segunda clave</span>/1'/1'/1/*))<br>
wsh(multi(2, <span class="key-placeholder">primera clave</span>/1'/1'/2/*/*, <span class="key-placeholder">segunda clave</span>/1'/1'/2/*/*))
<div class="backup">
<div class="intro">
{{.IconPadlock}}
<div class="text">
<h1>Respaldo encriptado</h1>
<h2>Sólo puede ser desencriptado con tu <strong>Código de Recuperación</strong>.</h2>
</div>
</div>
<div class="keys">
<div class="key">
<h3>Primera clave</h3>
<p>{{.FirstEncryptedKey}}</p>
</div>
<div class="key">
<h3>Segunda clave</h3>
<p>{{.SecondEncryptedKey}}</p>
</div>
<div class="date">
Creado el <date>{{.CurrentDate}}</date>
</div>
</div>
</div>
<section class="instructions">
<h1>Instrucciones</h1>
<p>Éste procedimiento de emergencia te ayudará a recuperar tus fondos si no puedes usar Muun en tu teléfono.</p>
<div class="item">
<div class="number-box">
<div class="number">1</div>
</div>
<div class="text-box">
<h3>Encuentra tu Código de Recuperación</h3>
<p>Lo escribiste en papel antes de crear tu Kit de Emergencia. Lo necesitarás después.</p>
</div>
</div>
<div class="item">
<div class="number-box">
<div class="number">2</div>
</div>
<div class="text-box">
<h3>Descarga la Herramienta de Recuperación</h3>
<p>Ingresa en <a href="https://github.com/muun/recovery">github.com/muun/recovery</a> y descarga la herramienta en tu computadora.</p>
</div>
</div>
<div class="item">
<div class="number-box">
<div class="number">3</div>
</div>
<div class="text-box">
<h3>Recupera tus fondos</h3>
<p>Ejecuta la Herramienta de Recuperación y sigue los pasos. Transferirá tus fondos a una dirección de Bitcoin que elijas.</p>
</div>
</div>
</section>
<section class="page-break-before">
<h2>Algunas preguntas que puedes tener</h2>
<h3>¿Puedo imprimir éste documento?</h3>
<p>Puedes, pero recomendamos almacenarlo online, en algún servicio como Google Drive, iCloud, OneDrive o Dropbox.
Éstos proveedores se han ganado la confianza de sus usuarios por estar siempre disponibles y custodiar su información
con fuertes prácticas de seguridad. También son gratuitos.</p>
<p>Si decides imprimirlos, asegúrate de guardarlos en algún lugar seguro y lejos de tu Código de Recuperación. Recuerda:
una persona con ambas piezas puede tomar control de tus fondos.</p>
<h3>¿Qué pasa si pierdo mi Kit de Emergencia?</h3>
<p>No te preocupes. Tu dinero no está perdido. Está todo ahí, en la blockchain de Bitcoin, esperándote. Usa nuestras
aplicaciones de Android o iOS y crea un nuevo Kit en el Centro de Seguridad.</p>
<h3>¿Qué pasa si alguien ve éste documento?</h3>
<p>Mientras tengas tu Código de Recuperación escondido, éste documento es inofensivo. Toda la información que contiene
está encriptada de forma segura, y sólo tu Código de Recuperación puede desencriptarla para poder usarla.</p>
<p>Aún así, recomendamos que lo guardes donde sólo puedes verlo. Si realmente te preocupa perderlo o quieres
compartirlo con alguien por otra razón, sólo hazlo con gente de plena confianza.</p>
<h3>¿Por qué no tengo mi mnemonic?</h3>
<p>Si estás familiarizado con Bitcoin, probablemente hayas visto las mnemonics y aprendido a confiar en ellas.
Al día de hoy, muchas billeteras utilizan esa técnica.</p>
<p>Las mnemonics no tienen ningún problema intrínseco, pero han quedado obsoletas. Las doce palabras no son suficientes para codificar
toda la información que una billetera moderna de Bitcoin necesita para funcionar, y el problema sólo se pondrá peor
a medida que la tecnología avance. Ya hay mejoras encaminadas que harían la recuperación con mnemonics no sólo
difícil, sino imposible.</p>
<p>Por eso decidimos garantizar la posesión completa con un método más seguro, flexible y capaz de evolucionar.
De ésta manera, podremos seguir mejorando nuestra tecnología y continuar modernizando nuestro software.</p>
<h3>Tengo otras preguntas</h3>
<p>Siempre estamos disponibles para contestarlas. Contáctanos a <strong><a href="mailto:support@muun.com" >support@muun.com</a></strong>
y te ayudaremos.</p>
<section class="help">
{{.IconHelp}}
<div class="text-box">
<h3>¿Necesitas ayuda?</h3>
<p>
Contáctanos en <a href="mailto:support@muun.com">support@muun.com</a>. Siempre estamos disponibles para ayudar.
</p>
</div>
</section>
</body>
`
<section class="advanced page-break-before">
<h1>Información Avanzada</h1>
<h2>Output descriptors</h2>
<p>Estos descriptors, combinados con tus claves, indican cómo encontrar los fondos de tu billetera en la blockchain de Bitcoin.</p>
{{ if .Descriptors }}
{{.Descriptors}}
{{ else }}
<ul class="descriptors">
<!-- These lines are way too long, but dividing them introduces unwanted spaces -->
<li><span class="f">sh</span>(<span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">primera clave</span>/1'/1'/0/*, <span class="fp">segunda clave</span>/1'/1'/0/*)))</li>
<li><span class="f">sh</span>(<span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">primera clave</span>/1'/1'/1/*, <span class="fp">segunda clave</span>/1'/1'/1/*)))</li>
<li><span class="f">sh</span>(<span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">primera clave</span>/1'/1'/2/*/*, <span class="fp">segunda clave</span>/1'/1'/2/*/*)))</li>
<li><span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">primera clave</span>/1'/1'/0/*, <span class="fp">segunda clave</span>/1'/1'/0/*))</li>
<li><span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">primera clave</span>/1'/1'/1/*, <span class="fp">segunda clave</span>/1'/1'/1/*))</li>
<li><span class="f">wsh</span>(<span class="f">multi</span>(2, <span class="fp">primera clave</span>/1'/1'/2/*/*, <span class="fp">segunda clave</span>/1'/1'/2/*/*))</li>
</ul>
{{ end }}
<p>
Los output descriptors son parte de un estándar de recuperación actualmente en desarrollo. Muun tiene la intención
de soportar este estándar y apoyar su crecimiento. Dado que se encuentra en una etapa muy temprana, la siguiente lista
incluye algunos elementos que aún no están estandarizados.
</p>
<p>
Cuando los descriptors lleguen a una etapa más madura, podrás llevar tus fondos de una billetera a la otra con completa
independencia. Muun cree que ésta libertad es central a la promesa de Bitcoin, y está trabajando para que eso suceda.
</p>
</section>
`
// iconHelp is the inline SVG markup for the help (question-mark) icon, injected
// into the Emergency Kit templates as {{.IconHelp}}. Do not edit the path data.
const iconHelp = `
<svg width="72" height="72" viewBox="0 0 72 72" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<g filter="url(#filter0_d)">
<circle cx="36" cy="36" r="28" fill="white"/>
</g>
<path d="M51.9762 41.7833L51.9999 28.1164C52.005 27.3149 51.8507 26.5203 51.5461 25.7789C51.2414 25.0374 50.7924 24.3638 50.2252 23.7972C49.6572 23.2232 48.9802 22.7686 48.2338 22.4599C47.4874 22.1513 46.6869 21.995 45.8791 22.0001H26.1208C24.4981 22.0022 22.9424 22.6473 21.795 23.7938C20.6476 24.9404 20.0021 26.4949 20 28.1164V41.2789C20.0021 42.9004 20.6476 44.4548 21.795 45.6014C22.9424 46.748 24.4981 47.393 26.1208 47.3951H26.5625V51.1547C26.5578 51.7232 26.7257 52.2798 27.0439 52.7512C27.3621 53.2225 27.8158 53.5865 28.3451 53.7951C28.6816 53.9281 29.04 53.9976 29.402 54C29.7741 53.9998 30.1425 53.9251 30.4852 53.7803C30.828 53.6354 31.1382 53.4234 31.3975 53.1567L36.6901 47.3951L48.1895 46.2128L51.9762 41.7833ZM35.8698 45.3774L29.7175 51.4778C29.6594 51.5449 29.5815 51.5917 29.495 51.6116C29.4085 51.6314 29.3179 51.6232 29.2363 51.5882C29.1493 51.5551 29.075 51.4953 29.024 51.4175C28.973 51.3396 28.948 51.2476 28.9524 51.1547V46.2128C28.9524 45.8993 28.8277 45.5986 28.6059 45.3769C28.384 45.1551 28.083 45.0306 27.7693 45.0306H26.1208C25.125 45.0306 24.17 44.6353 23.4659 43.9317C22.7618 43.2281 22.3663 42.2739 22.3663 41.2789V28.1164C22.3663 27.1213 22.7618 26.1671 23.4659 25.4635C24.17 24.7599 25.125 24.3646 26.1208 24.3646H45.8791C46.372 24.3641 46.86 24.4614 47.315 24.6508C47.7699 24.8402 48.1827 25.118 48.5293 25.4681C48.8797 25.8145 49.1577 26.227 49.3472 26.6816C49.5368 27.1362 49.6341 27.6239 49.6336 28.1164V41.2789C49.6336 42.2739 49.238 43.2281 48.5339 43.9317C47.8298 44.6353 46.8749 45.0306 45.8791 45.0306H36.6901C36.3764 45.0309 36.0757 45.1556 35.854 45.3774H35.8698ZM36.6901 47.3951H45.8791C47.4141 47.3926 48.8922 46.8146 50.0211 45.7755C51.1501 44.7364 51.8478 43.3117 51.9762 41.7833L48.1895 46.2128L36.6901 47.3951Z" fill="#2474CD"/>
<path d="M34.708 37.1242C34.612 37.1242 34.528 37.0942 34.456 37.034C34.384 36.9619 34.348 36.8777 34.348 36.7815V36.3666C34.432 35.8615 34.618 35.4105 34.906 35.0136C35.206 34.6168 35.614 34.1658 36.13 33.6607C36.514 33.2758 36.802 32.9631 36.994 32.7226C37.186 32.4701 37.288 32.2175 37.3 31.965C37.336 31.5922 37.21 31.2975 36.922 31.081C36.646 30.8525 36.31 30.7383 35.914 30.7383C34.978 30.7383 34.402 31.1893 34.186 32.0912C34.09 32.3799 33.904 32.5242 33.628 32.5242H31.432C31.3 32.5242 31.192 32.4821 31.108 32.3979C31.036 32.3017 31 32.1814 31 32.0371C31.024 31.3757 31.234 30.7503 31.63 30.161C32.026 29.5597 32.608 29.0727 33.376 28.6998C34.144 28.327 35.062 28.1406 36.13 28.1406C37.222 28.1406 38.11 28.315 38.794 28.6638C39.478 29.0005 39.964 29.4214 40.252 29.9265C40.552 30.4196 40.702 30.9247 40.702 31.4418C40.702 32.0311 40.564 32.5482 40.288 32.9932C40.024 33.4382 39.628 33.9493 39.1 34.5266C38.776 34.8753 38.518 35.17 38.326 35.4105C38.146 35.651 38.008 35.9036 37.912 36.1681C37.876 36.2764 37.834 36.4387 37.786 36.6552C37.69 36.8236 37.606 36.9438 37.534 37.016C37.462 37.0881 37.36 37.1242 37.228 37.1242H34.708ZM34.744 40.9486C34.612 40.9486 34.504 40.9065 34.42 40.8223C34.336 40.7381 34.294 40.6299 34.294 40.4976V38.4411C34.294 38.3088 34.336 38.2006 34.42 38.1164C34.504 38.0322 34.612 37.9901 34.744 37.9901H37.048C37.18 37.9901 37.288 38.0322 37.372 38.1164C37.468 38.2006 37.516 38.3088 37.516 38.4411V40.4976C37.516 40.6299 37.468 40.7381 37.372 40.8223C37.288 40.9065 37.18 40.9486 37.048 40.9486H34.744Z" fill="#182449"/>
</g>
<defs>
<filter id="filter0_d" x="0" y="4" width="72" height="72" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/>
<feOffset dy="4"/>
<feGaussianBlur stdDeviation="4"/>
<feColorMatrix type="matrix" values="0 0 0 0 0.124943 0 0 0 0 0.228158 0 0 0 0 0.346117 0 0 0 0.05 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow" result="shape"/>
</filter>
<clipPath id="clip0">
<rect width="72" height="72" fill="white"/>
</clipPath>
</defs>
</svg>
`
// iconPadlock is the inline SVG markup for the padlock icon, injected into the
// Emergency Kit templates as {{.IconPadlock}}. Do not edit the path data.
const iconPadlock = `
<svg width="72" height="72" viewBox="0 0 72 72" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<g filter="url(#filter0_dd)">
<g filter="url(#filter1_i)">
<path d="M48.7367 30.2734H23.2633C21.461 30.2734 20 31.7345 20 33.5367V53.192C20 54.9942 21.461 56.4553 23.2633 56.4553H48.7367C50.539 56.4553 52 54.9942 52 53.192V33.5367C52 31.7345 50.539 30.2734 48.7367 30.2734Z" fill="url(#paint0_linear)"/>
</g>
<path d="M38.9119 41.1786C38.9119 42.7853 37.6095 44.0877 36.0028 44.0877C34.3962 44.0877 33.0938 42.7853 33.0938 41.1786C33.0938 39.572 34.3962 38.2695 36.0028 38.2695C37.6095 38.2695 38.9119 39.572 38.9119 41.1786Z" fill="url(#paint1_radial)"/>
<path d="M34.4106 43.9113C34.4915 43.5876 34.7824 43.3604 35.1161 43.3604H36.8895C37.2233 43.3604 37.5142 43.5876 37.5951 43.9113L38.686 48.275C38.8008 48.734 38.4536 49.1786 37.9805 49.1786H34.0252C33.5521 49.1786 33.2049 48.734 33.3197 48.275L34.4106 43.9113Z" fill="url(#paint2_radial)"/>
<g filter="url(#filter2_i)">
<path d="M25.0906 24.8182V30.2727H29.0906V24.8182C29.0906 22.8788 30.4724 19 35.9997 19C41.5269 19 42.9088 22.8788 42.9088 24.8182V30.2727H46.9088V24.8182C46.9088 21.5455 44.7269 15 35.9997 15C27.2724 15 25.0906 21.5455 25.0906 24.8182Z" fill="#2573F7"/>
<path d="M25.0906 24.8182V30.2727H29.0906V24.8182C29.0906 22.8788 30.4724 19 35.9997 19C41.5269 19 42.9088 22.8788 42.9088 24.8182V30.2727H46.9088V24.8182C46.9088 21.5455 44.7269 15 35.9997 15C27.2724 15 25.0906 21.5455 25.0906 24.8182Z" fill="url(#paint3_linear)"/>
</g>
</g>
</g>
<defs>
<filter id="filter0_dd" x="7.99957" y="8.99978" width="56.0009" height="65.4561" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/>
<feOffset dy="6.00022"/>
<feGaussianBlur stdDeviation="6.00022"/>
<feColorMatrix type="matrix" values="0 0 0 0 0.340702 0 0 0 0 0.386926 0 0 0 0 0.529451 0 0 0 0.3 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"/>
<feOffset dy="1.50005"/>
<feGaussianBlur stdDeviation="1.50005"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="effect1_dropShadow" result="effect2_dropShadow"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect2_dropShadow" result="shape"/>
</filter>
<filter id="filter1_i" x="19.5921" y="29.4576" width="32.4079" height="26.9976" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-0.40791" dy="-0.815819"/>
<feGaussianBlur stdDeviation="0.815819"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0.25098 0 0 0 0 0.380392 0 0 0 0 0.552941 0 0 0 1 0"/>
<feBlend mode="normal" in2="shape" result="effect1_innerShadow"/>
</filter>
<filter id="filter2_i" x="24.7156" y="14.625" width="22.1932" height="15.6477" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feBlend mode="normal" in="SourceGraphic" in2="BackgroundImageFix" result="shape"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-0.375014" dy="-0.375014"/>
<feGaussianBlur stdDeviation="0.375014"/>
<feComposite in2="hardAlpha" operator="arithmetic" k2="-1" k3="1"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="shape" result="effect1_innerShadow"/>
</filter>
<linearGradient id="paint0_linear" x1="25.8754" y1="40.3205" x2="64.6733" y2="80.0278" gradientUnits="userSpaceOnUse">
<stop stop-color="#91ACC9"/>
<stop offset="0.561326" stop-color="#3D5F8C"/>
</linearGradient>
<radialGradient id="paint1_radial" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(36.0028 43.7241) rotate(90) scale(5.45455 2.90909)">
<stop stop-color="#0B141D"/>
<stop offset="1" stop-color="#27394D"/>
</radialGradient>
<radialGradient id="paint2_radial" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(36.0028 43.7241) rotate(90) scale(5.45455 2.90909)">
<stop stop-color="#0B141D"/>
<stop offset="1" stop-color="#27394D"/>
</radialGradient>
<linearGradient id="paint3_linear" x1="35.9997" y1="26.0114" x2="35.9997" y2="34.4318" gradientUnits="userSpaceOnUse">
<stop stop-color="#435F7D"/>
<stop offset="1" stop-color="#213953"/>
</linearGradient>
<clipPath id="clip0">
<rect width="72" height="72" fill="white"/>
</clipPath>
</defs>
</svg>
`

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,171 @@
package emergencykit
import (
"fmt"
"strings"
)
// DescriptorsData carries the two key fingerprints that are substituted into
// the descriptor templates (presumably BIP32 key-origin fingerprints — the
// format strings render them at the start of each derivation path; confirm
// with callers).
type DescriptorsData struct {
	FirstFingerprint  string // fills the first %s of each descriptor template
	SecondFingerprint string // fills the second %s of each descriptor template
}
// Output descriptors shown in the PDF do not include legacy descriptors no longer in use. We leave
// the decision of whether to scan them to the Recovery Tool.
// Each format string takes two %s placeholders: the first and second key
// fingerprints (see DescriptorsData).
var descriptorFormats = []string{
	"sh(wsh(multi(2, %s/1'/1'/0/*, %s/1'/1'/0/*)))", // V3 change
	"sh(wsh(multi(2, %s/1'/1'/1/*, %s/1'/1'/1/*)))", // V3 external
	"wsh(multi(2, %s/1'/1'/0/*, %s/1'/1'/0/*))",     // V4 change
	"wsh(multi(2, %s/1'/1'/1/*, %s/1'/1'/1/*))",     // V4 external
}
// GetDescriptors returns an array of raw output descriptors.
// Each entry is a descriptor template filled in with both fingerprints, with
// its checksum appended after a "#" separator (see calculateChecksum).
func GetDescriptors(data *DescriptorsData) []string {
	descriptors := make([]string, 0, len(descriptorFormats))

	for _, format := range descriptorFormats {
		base := fmt.Sprintf(format, data.FirstFingerprint, data.SecondFingerprint)
		descriptors = append(descriptors, base+"#"+calculateChecksum(base))
	}

	return descriptors
}
// GetDescriptorsHTML returns the HTML for the output descriptor list in the Emergency Kit.
// It decorates each raw descriptor with styling <span>s around script types,
// fingerprints and the checksum, then wraps everything in a <ul>.
func GetDescriptorsHTML(data *DescriptorsData) string {
	var items []string

	for _, raw := range GetDescriptors(data) {
		base, checksum := splitChecksum(raw)
		markup := base

		// Wrap script type names. Order matters: "wsh(" must be handled before
		// "sh(" so the "sh" inside an already-wrapped "wsh" isn't matched again
		// (the "(" in each pattern anchors the match).
		for _, scriptType := range []string{"wsh", "sh", "multi"} {
			markup = strings.ReplaceAll(markup, scriptType+"(", renderScriptType(scriptType)+"(")
		}

		// Wrap both fingerprint occurrences:
		markup = strings.ReplaceAll(markup, data.FirstFingerprint, renderFingerprint(data.FirstFingerprint))
		markup = strings.ReplaceAll(markup, data.SecondFingerprint, renderFingerprint(data.SecondFingerprint))

		// Re-attach the checksum and wrap the whole line as a list item:
		items = append(items, renderItem(markup+renderChecksum(checksum)))
	}

	return renderList(items)
}
// renderList wraps the given <li> items in the descriptors <ul> element,
// joining them with newlines.
func renderList(itemsHTML []string) string {
	return fmt.Sprint(`<ul class="descriptors">`, strings.Join(itemsHTML, "\n"), `</ul>`)
}
// renderItem wraps a single descriptor's markup in an <li> element.
func renderItem(innerHTML string) string {
	return fmt.Sprint("<li>", innerHTML, "</li>")
}
// renderScriptType wraps a script function name (sh/wsh/multi) in its styling span.
func renderScriptType(scriptType string) string {
	return fmt.Sprint(`<span class="f">`, scriptType, `</span>`)
}
// renderFingerprint wraps a key fingerprint in its styling span.
func renderFingerprint(fingerprint string) string {
	return fmt.Sprint(`<span class="fp">`, fingerprint, `</span>`)
}
// renderChecksum renders the "#"-prefixed checksum suffix of a descriptor.
func renderChecksum(checksum string) string {
	return fmt.Sprint(`#<span class="checksum">`, checksum, `</span>`)
}
// splitChecksum splits a "descriptor#checksum" string into its two parts,
// returning an empty checksum when no "#" separator is present. With more
// than one "#" only the segment between the first two is the checksum.
func splitChecksum(descriptor string) (string, string) {
	parts := strings.SplitN(descriptor, "#", 3)
	if len(parts) < 2 {
		return parts[0], ""
	}
	return parts[0], parts[1]
}
// -------------------------------------------------------------------------------------------------
// WARNING:
// Below this point, you may find only fear and confusion.
// I translated the code for computing checksums from the original C++ in the bitcoind source,
// making a few adjustments for language differences. It's a specialized algorithm for the domain of
// output descriptors, and it uses the same primitives as the bech32 encoding.

// inputCharset lists every character allowed in a descriptor; a character's
// index in this string is the symbol value fed into the checksum.
var inputCharset = "0123456789()[],'/*abcdefgh@:$%{}IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~ijklmnopqrstuvwxyzABCDEFGH`#\"\\ "

// checksumCharset is the 32-character bech32 alphabet used to spell out the
// 8 checksum characters (5 bits each).
var checksumCharset = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
// calculateChecksum computes the 8-character checksum for the descriptor desc,
// translated from bitcoind's C++ implementation (a BCH code built on the same
// primitives as bech32 — see the WARNING comment above). It returns "" when
// desc contains a character outside inputCharset.
func calculateChecksum(desc string) string {
	var c uint64 = 1     // checksum accumulator, seeded with 1 as in bech32
	var cls int = 0      // pending "group" value built from symbols' high bits
	var clscount int = 0 // number of symbols accumulated into cls (flushed every 3)
	for _, ch := range desc {
		pos := strings.IndexRune(inputCharset, ch)
		if pos == -1 {
			return "" // character not allowed in a descriptor
		}
		// Feed the symbol's low 5 bits directly into the checksum...
		c = polyMod(c, pos&31)
		// ...and collect its high bits as a base-3 digit, flushed in groups of 3:
		cls = cls*3 + (pos >> 5)
		clscount++
		if clscount == 3 {
			c = polyMod(c, cls)
			cls = 0
			clscount = 0
		}
	}
	if clscount > 0 {
		c = polyMod(c, cls) // flush a trailing partial group
	}
	// Shift in 8 zero symbols to make room for the checksum itself:
	for i := 0; i < 8; i++ {
		c = polyMod(c, 0)
	}
	c ^= 1 // final xor, as in bech32
	ret := make([]byte, 8)
	for i := 0; i < 8; i++ {
		// Emit 5 bits per character, most significant group first:
		ret[i] = checksumCharset[(c>>(5*(7-i)))&31]
	}
	return string(ret)
}
// polyMod performs one step of the descriptor checksum's BCH polynomial
// modulus: it shifts the 35-bit accumulator left by 5 bits, mixes in the next
// symbol value, and xors in one generator constant per bit shifted out.
func polyMod(c uint64, intVal int) uint64 {
	// Generator constants, one per bit of the 5 bits that overflow the 35-bit
	// accumulator (same values as bitcoind's descriptor checksum).
	generators := [5]uint64{
		0xf5dee51989,
		0xa9fdca3312,
		0x1bab10e32d,
		0x3706b1677a,
		0x644d626ffd,
	}

	top := c >> 35
	c = ((c & 0x7ffffffff) << 5) ^ uint64(intVal)

	for bit, generator := range generators {
		if top&(1<<uint(bit)) != 0 {
			c ^= generator
		}
	}
	return c
}

View File

@ -2,8 +2,9 @@ package emergencykit
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"fmt"
"strconv"
"text/template"
"time"
)
@ -11,7 +12,9 @@ import (
// Input struct to fill the PDF
type Input struct {
FirstEncryptedKey string
FirstFingerprint string
SecondEncryptedKey string
SecondFingerprint string
}
// Output with the html as string and the verification code
@ -20,24 +23,57 @@ type Output struct {
VerificationCode string
}
// spanishMonthNames maps time.Month-1 to the capitalized Spanish month name.
// Used by formatDate because Go's time package has no locale support.
var spanishMonthNames = []string{
	"Enero",
	"Febrero",
	"Marzo",
	"Abril",
	"Mayo",
	"Junio",
	"Julio",
	"Agosto",
	"Septiembre",
	"Octubre",
	"Noviembre",
	"Diciembre",
}
// GenerateHTML returns the translated emergency kit html as a string along with the verification code.
func GenerateHTML(params *Input, lang string) (*Output, error) {
verificationCode := randomCode(6)
verificationCode := generateDeterministicCode(params)
// Render output descriptors:
var descriptors string
if params.hasFingerprints() {
descriptors = GetDescriptorsHTML(&DescriptorsData{
FirstFingerprint: params.FirstFingerprint,
SecondFingerprint: params.SecondFingerprint,
})
}
// Render page body:
content, err := render("EmergencyKitContent", lang, &contentData{
// Externally provided:
FirstEncryptedKey: params.FirstEncryptedKey,
SecondEncryptedKey: params.SecondEncryptedKey,
VerificationCode: verificationCode,
// Careful: do not change these format values. See this doc for more info: https://golang.org/pkg/time/#pkg-constants
CurrentDate: time.Now().Format("2006/01/02"), // Format date to YYYY/MM/DD
// Computed by us:
VerificationCode: verificationCode,
CurrentDate: formatDate(time.Now(), lang),
Descriptors: descriptors,
// Template pieces separated for reuse:
IconHelp: iconHelp,
IconPadlock: iconPadlock,
})
if err != nil {
return nil, fmt.Errorf("failed to render EmergencyKitContent template: %w", err)
}
// Render complete HTML page:
page, err := render("EmergencyKitPage", lang, &pageData{
Css: css,
Logo: logo,
Content: content,
})
if err != nil {
@ -50,17 +86,40 @@ func GenerateHTML(params *Input, lang string) (*Output, error) {
}, nil
}
func randomCode(length int) string {
result := make([]byte, length)
_, err := rand.Read(result)
if err != nil {
panic(err)
func formatDate(t time.Time, lang string) string {
if lang == "en" {
return t.Format("January 2, 2006")
} else {
// Golang has no i18n facilities, so we do our own formatting.
year, month, day := t.Date()
monthName := spanishMonthNames[month-1]
return fmt.Sprintf("%d de %s, %d", day, monthName, year)
}
charset := "0123456789"
for i := 0; i < length; i++ {
result[i] = charset[int(result[i])%len(charset)]
}
func generateDeterministicCode(params *Input) string {
// NOTE:
// This function creates a stable verification code given the inputs to render the Emergency Kit. For now, the
// implementation relies exclusively on the SecondEncryptedKey, which is the Muun key. This is obviously not ideal,
// since we're both dropping part of the input and introducing the assumption that the Muun key will always be
// rendered second -- but it compensates for a problem with one of our clients that causes the user key serialization
// to be recreated each time the kit is rendered (making this deterministic approach useless).
// Create a deterministic serialization of the input:
inputMaterial := params.SecondEncryptedKey
// Compute a cryptographically secure hash of the material (critical, these are keys):
inputHash := sha256.Sum256([]byte(inputMaterial))
// Extract a verification code from the hash (doesn't matter if we discard bytes):
var code string
for _, b := range inputHash[:6] {
code += strconv.Itoa(int(b) % 10)
}
return string(result)
return code
}
func render(name, language string, data interface{}) (string, error) {
@ -80,12 +139,18 @@ func getContent(name string, language string) string {
switch name {
case "EmergencyKitPage":
return page
case "EmergencyKitContent":
if language == "es" {
return contentES
}
return contentEN
default:
panic("could not find template with name: " + name)
}
}
// hasFingerprints reports whether both key fingerprints were provided, which
// is the precondition for rendering output descriptors in the kit.
func (i *Input) hasFingerprints() bool {
	if i.FirstFingerprint == "" {
		return false
	}
	return i.SecondFingerprint != ""
}

View File

@ -0,0 +1,145 @@
package emergencykit
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"github.com/pdfcpu/pdfcpu/pkg/api"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// MetadataReader can extract the metadata file from a PDF.
type MetadataReader struct {
	SrcFile string // path of the PDF to read the embedded attachment from
}

// MetadataWriter can add the metadata file to a PDF.
type MetadataWriter struct {
	SrcFile string // path of the source PDF (its directory must be writable)
	DstFile string // path where the copy with metadata attached is written
}

// Metadata holds the machine-readable data for an Emergency Kit.
// It is serialized as JSON and embedded in the PDF as an attachment.
type Metadata struct {
	Version int `json:"version"`
	// NOTE(review): presumably the block height at wallet creation, used to
	// bound blockchain scans — confirm with callers.
	BirthdayBlock     int            `json:"birthdayBlock"`
	EncryptedKeys     []*MetadataKey `json:"encryptedKeys"`
	OutputDescriptors []string       `json:"outputDescriptors"`
}

// MetadataKey holds an entry in the Metadata key array.
type MetadataKey struct {
	DhPubKey         string `json:"dhPubKey"`
	EncryptedPrivKey string `json:"encryptedPrivKey"`
	Salt             string `json:"salt"`
}
// The name for the embedded metadata file in the PDF document:
const metadataName = "metadata.json"

// Default configuration values copied from pdfcpu source code (some values are irrelevant to us):
var pdfConfig = &pdfcpu.Configuration{
	Reader15:         true,
	DecodeAllStreams: false,
	// Relaxed validation tolerates minor spec violations in input PDFs:
	ValidationMode:    pdfcpu.ValidationRelaxed,
	Eol:               pdfcpu.EolLF,
	WriteObjectStream: true,
	WriteXRefStream:   true,
	EncryptUsingAES:   true,
	EncryptKeyLength:  256,
	Permissions:       pdfcpu.PermissionsNone,
}
// HasMetadata returns whether the metadata is present (and alone) in SrcFile.
// Any other attachment set (none, several, or a different name) yields false.
func (mr *MetadataReader) HasMetadata() (bool, error) {
	attachments, err := api.ListAttachmentsFile(mr.SrcFile, pdfConfig)
	if err != nil {
		return false, fmt.Errorf("HasMetadata failed to list attachments: %w", err)
	}
	if len(attachments) != 1 {
		return false, nil
	}
	return attachments[0] == metadataName, nil
}
// ReadMetadata returns the deserialized metadata file embedded in the SrcFile PDF.
// It fails when the metadata attachment is absent (see HasMetadata), when
// extraction fails, or when the attachment isn't valid JSON for Metadata.
func (mr *MetadataReader) ReadMetadata() (*Metadata, error) {
	// NOTE:
	// Due to library constraints, this makes use of a temporary directory in the default system temp
	// location, which for the Recovery Tool will always be accessible. If we eventually want to read
	// this metadata in mobile clients, we'll need the caller to provide a directory.

	// Before we begin, verify that the metadata file is embedded:
	hasMetadata, err := mr.HasMetadata()
	if err != nil {
		return nil, fmt.Errorf("ReadMetadata failed to check for existence: %w", err)
	}
	if !hasMetadata {
		return nil, fmt.Errorf("ReadMetadata didn't find %s (or found more) in this PDF", metadataName)
	}

	// Create the temporary directory, with a deferred call to clean up:
	tmpDir, err := ioutil.TempDir("", "ek-metadata-*")
	if err != nil {
		// BUGFIX: the original dropped `err` here; wrap it so callers can see the underlying cause.
		return nil, fmt.Errorf("ReadMetadata failed to create a temporary directory: %w", err)
	}
	defer os.RemoveAll(tmpDir)

	// Extract the embedded attachment from the PDF into that directory:
	err = api.ExtractAttachmentsFile(mr.SrcFile, tmpDir, []string{metadataName}, pdfConfig)
	if err != nil {
		return nil, fmt.Errorf("ReadMetadata failed to extract attachment: %w", err)
	}

	// Read the contents of the file:
	metadataBytes, err := ioutil.ReadFile(filepath.Join(tmpDir, metadataName))
	if err != nil {
		return nil, fmt.Errorf("ReadMetadata failed to read the extracted file: %w", err)
	}

	// Deserialize the metadata:
	var metadata Metadata
	err = json.Unmarshal(metadataBytes, &metadata)
	if err != nil {
		return nil, fmt.Errorf("ReadMetadata failed to unmarshal %s: %w", string(metadataBytes), err)
	}

	// Done we are!
	return &metadata, nil
}
// WriteMetadata creates a copy of SrcFile with attached JSON metadata into DstFile.
func (mw *MetadataWriter) WriteMetadata(metadata *Metadata) error {
	// NOTE:
	// Due to library constraints, this makes use of a temporary file placed in the same directory as
	// `SrcFile`, which is assumed to be writable. This is a much safer bet than attempting to pick a
	// location for temporary files ourselves.

	// The temporary file sits next to SrcFile and carries the attachment's name:
	tmpFile := filepath.Join(filepath.Dir(mw.SrcFile), metadataName)

	// Serialize the metadata:
	serialized, err := json.Marshal(metadata)
	if err != nil {
		return fmt.Errorf("WriteMetadata failed to marshal: %w", err)
	}

	// Write to the temporary file, with a deferred call to clean up:
	if err := ioutil.WriteFile(tmpFile, serialized, os.FileMode(0600)); err != nil {
		return fmt.Errorf("WriteMetadata failed to write a temporary file: %w", err)
	}
	defer os.Remove(tmpFile)

	// Add the attachment, returning potential errors:
	if err := api.AddAttachmentsFile(mw.SrcFile, mw.DstFile, []string{tmpFile}, false, pdfConfig); err != nil {
		return fmt.Errorf("WriteMetadata failed to add attachment file %s: %w", tmpFile, err)
	}

	return nil
}

View File

@ -7,6 +7,8 @@ import (
"crypto/rand"
"crypto/sha256"
"encoding/binary"
"errors"
"fmt"
"io"
"math"
"math/big"
@ -15,7 +17,6 @@ import (
"github.com/btcsuite/btcd/btcec"
"github.com/btcsuite/btcutil/base58"
"github.com/pkg/errors"
)
const serializedPublicKeyLength = btcec.PubKeyBytesLenCompressed
@ -47,18 +48,18 @@ type hdPubKeyEncrypter struct {
func addVariableBytes(writer io.Writer, data []byte) error {
if len(data) > math.MaxUint16 {
return errors.Errorf("data length can't exceeed %v", math.MaxUint16)
return fmt.Errorf("data length can't exceeed %v", math.MaxUint16)
}
dataLen := uint16(len(data))
err := binary.Write(writer, binary.BigEndian, &dataLen)
if err != nil {
return errors.Wrapf(err, "failed to write var bytes len")
return fmt.Errorf("failed to write var bytes len: %w", err)
}
n, err := writer.Write(data)
if err != nil || n != len(data) {
return errors.Errorf("failed to write var bytes")
return errors.New("failed to write var bytes")
}
return nil
@ -88,12 +89,12 @@ func (e *hdPubKeyEncrypter) Encrypt(payload []byte) (string, error) {
signingKey, err := e.senderKey.key.ECPrivKey()
if err != nil {
return "", errors.Wrapf(err, "Encrypt: failed to extract signing key")
return "", fmt.Errorf("Encrypt: failed to extract signing key: %w", err)
}
encryptionKey, err := e.receiverKey.key.ECPubKey()
if err != nil {
return "", errors.Wrapf(err, "Encrypt: failed to extract pub key")
return "", fmt.Errorf("Encrypt: failed to extract pub key: %w", err)
}
// Sign "payload || encryptionKey" to protect against payload reuse by 3rd parties
@ -103,34 +104,34 @@ func (e *hdPubKeyEncrypter) Encrypt(payload []byte) (string, error) {
hash := sha256.Sum256(signaturePayload)
senderSignature, err := btcec.SignCompact(btcec.S256(), signingKey, hash[:], false)
if err != nil {
return "", errors.Wrapf(err, "Encrypt: failed to sign payload")
return "", fmt.Errorf("Encrypt: failed to sign payload: %w", err)
}
// plaintext is "senderSignature || payload"
plaintext := bytes.NewBuffer(make([]byte, 0, 2+len(payload)+2+len(senderSignature)))
err = addVariableBytes(plaintext, senderSignature)
if err != nil {
return "", errors.Wrapf(err, "Encrypter: failed to add senderSignature")
return "", fmt.Errorf("Encrypter: failed to add senderSignature: %w", err)
}
err = addVariableBytes(plaintext, payload)
if err != nil {
return "", errors.Wrapf(err, "Encrypter: failed to add payload")
return "", fmt.Errorf("Encrypter: failed to add payload: %w", err)
}
pubEph, sharedSecret, err := generateSharedEncryptionSecretForAES(encryptionKey)
if err != nil {
return "", errors.Wrapf(err, "Encrypt: failed to generate shared encryption key")
return "", fmt.Errorf("Encrypt: failed to generate shared encryption key: %w", err)
}
blockCipher, err := aes.NewCipher(sharedSecret)
if err != nil {
return "", errors.Wrapf(err, "Encrypt: new aes failed")
return "", fmt.Errorf("Encrypt: new aes failed: %w", err)
}
gcm, err := cipher.NewGCM(blockCipher)
if err != nil {
return "", errors.Wrapf(err, "Encrypt: new gcm failed")
return "", fmt.Errorf("Encrypt: new gcm failed: %w", err)
}
nonce := randomBytes(gcm.NonceSize())
@ -143,13 +144,13 @@ func (e *hdPubKeyEncrypter) Encrypt(payload []byte) (string, error) {
err = addVariableBytes(result, []byte(e.receiverKey.Path))
if err != nil {
return "", errors.Wrapf(err, "Encrypt: failed to add receiver path")
return "", fmt.Errorf("Encrypt: failed to add receiver path: %w", err)
}
nonceLen := uint16(len(nonce))
err = binary.Write(result, binary.BigEndian, &nonceLen)
if err != nil {
return "", errors.Wrapf(err, "Encrypt: failed to add nonce len")
return "", fmt.Errorf("Encrypt: failed to add nonce len: %w", err)
}
ciphertext := gcm.Seal(nil, nonce, plaintext.Bytes(), result.Bytes())
@ -157,12 +158,12 @@ func (e *hdPubKeyEncrypter) Encrypt(payload []byte) (string, error) {
// result is "additionalData || nonce || ciphertext"
n, err := result.Write(nonce)
if err != nil || n != len(nonce) {
return "", errors.Errorf("Encrypt: failed to add nonce")
return "", errors.New("Encrypt: failed to add nonce")
}
n, err = result.Write(ciphertext)
if err != nil || n != len(ciphertext) {
return "", errors.Errorf("Encrypt: failed to add ciphertext")
return "", errors.New("Encrypt: failed to add ciphertext")
}
return base58.Encode(result.Bytes()), nil
@ -185,13 +186,13 @@ func extractVariableBytes(reader *bytes.Reader, limit int) ([]byte, error) {
var len uint16
err := binary.Read(reader, binary.BigEndian, &len)
if err != nil || int(len) > limit || int(len) > reader.Len() {
return nil, errors.Errorf("failed to read byte array len")
return nil, errors.New("failed to read byte array len")
}
result := make([]byte, len)
n, err := reader.Read(result)
if err != nil || n != int(len) {
return nil, errors.Errorf("failed to extract byte array")
return nil, errors.New("failed to extract byte array")
}
return result, nil
@ -210,22 +211,22 @@ func (d *hdPrivKeyDecrypter) Decrypt(payload string) ([]byte, error) {
reader := bytes.NewReader(decoded)
version, err := reader.ReadByte()
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to read version byte")
return nil, fmt.Errorf("Decrypt: failed to read version byte: %w", err)
}
if version != PKEncryptionVersion {
return nil, errors.Errorf("Decrypt: found key version %v, expected %v",
return nil, fmt.Errorf("Decrypt: found key version %v, expected %v",
version, PKEncryptionVersion)
}
rawPubEph := make([]byte, serializedPublicKeyLength)
n, err := reader.Read(rawPubEph)
if err != nil || n != serializedPublicKeyLength {
return nil, errors.Errorf("Decrypt: failed to read pubeph")
return nil, errors.New("Decrypt: failed to read pubeph")
}
receiverPath, err := extractVariableString(reader, maxDerivationPathLen)
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to extract receiver path")
return nil, fmt.Errorf("Decrypt: failed to extract receiver path: %w", err)
}
// additionalDataSize is Whatever I've read so far plus two bytes for the nonce len
@ -234,24 +235,24 @@ func (d *hdPrivKeyDecrypter) Decrypt(payload string) ([]byte, error) {
minCiphertextLen := 2 // an empty sig with no plaintext
nonce, err := extractVariableBytes(reader, reader.Len()-minCiphertextLen)
if err != nil || len(nonce) < minNonceLen {
return nil, errors.Errorf("Decrypt: failed to read nonce")
return nil, errors.New("Decrypt: failed to read nonce")
}
// What's left is the ciphertext
ciphertext := make([]byte, reader.Len())
_, err = reader.Read(ciphertext)
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to read ciphertext")
return nil, fmt.Errorf("Decrypt: failed to read ciphertext: %w", err)
}
receiverKey, err := d.receiverKey.DeriveTo(receiverPath)
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to derive receiver key to path %v", receiverPath)
return nil, fmt.Errorf("Decrypt: failed to derive receiver key to path %v: %w", receiverPath, err)
}
encryptionKey, err := receiverKey.key.ECPrivKey()
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to extract encryption key")
return nil, fmt.Errorf("Decrypt: failed to extract encryption key: %w", err)
}
var verificationKey *btcec.PublicKey
@ -259,7 +260,7 @@ func (d *hdPrivKeyDecrypter) Decrypt(payload string) ([]byte, error) {
// Use the derived receiver key if the sender key is not provided
verificationKey, err = receiverKey.PublicKey().key.ECPubKey()
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to extract verification key")
return nil, fmt.Errorf("Decrypt: failed to extract verification key: %w", err)
}
} else if d.senderKey != nil {
verificationKey = d.senderKey.key
@ -267,34 +268,34 @@ func (d *hdPrivKeyDecrypter) Decrypt(payload string) ([]byte, error) {
sharedSecret, err := recoverSharedEncryptionSecretForAES(encryptionKey, rawPubEph)
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to recover shared secret")
return nil, fmt.Errorf("Decrypt: failed to recover shared secret: %w", err)
}
blockCipher, err := aes.NewCipher(sharedSecret)
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: new aes failed")
return nil, fmt.Errorf("Decrypt: new aes failed: %w", err)
}
gcm, err := cipher.NewGCMWithNonceSize(blockCipher, len(nonce))
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: new gcm failed")
return nil, fmt.Errorf("Decrypt: new gcm failed: %w", err)
}
plaintext, err := gcm.Open(nil, nonce, ciphertext, decoded[:additionalDataSize])
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: AEAD failed")
return nil, fmt.Errorf("Decrypt: AEAD failed: %w", err)
}
plaintextReader := bytes.NewReader(plaintext)
sig, err := extractVariableBytes(plaintextReader, maxSignatureLen)
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to read sig")
return nil, fmt.Errorf("Decrypt: failed to read sig: %w", err)
}
data, err := extractVariableBytes(plaintextReader, plaintextReader.Len())
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to extract user data")
return nil, fmt.Errorf("Decrypt: failed to extract user data: %w", err)
}
signatureData := make([]byte, 0, len(sig)+serializedPublicKeyLength)
@ -303,10 +304,10 @@ func (d *hdPrivKeyDecrypter) Decrypt(payload string) ([]byte, error) {
hash := sha256.Sum256(signatureData)
signatureKey, _, err := btcec.RecoverCompact(btcec.S256(), sig, hash[:])
if err != nil {
return nil, errors.Wrapf(err, "Decrypt: failed to verify signature")
return nil, fmt.Errorf("Decrypt: failed to verify signature: %w", err)
}
if verificationKey != nil && !signatureKey.IsEqual(verificationKey) {
return nil, errors.Errorf("Decrypt: signing key mismatch")
return nil, errors.New("Decrypt: signing key mismatch")
}
return data, nil
@ -331,7 +332,7 @@ func encryptWithPubKey(pubKey *btcec.PublicKey, plaintext []byte) (*btcec.Public
ciphertext, err := aescbc.EncryptNoPadding(paddedSerializeBigInt(aescbc.KeySize, sharedSecret), iv, plaintext)
if err != nil {
return nil, nil, errors.Wrapf(err, "encryptWithPubKey: encrypt failed")
return nil, nil, fmt.Errorf("encryptWithPubKey: encrypt failed: %w", err)
}
return pubEph, ciphertext, nil
@ -342,7 +343,7 @@ func encryptWithPubKey(pubKey *btcec.PublicKey, plaintext []byte) (*btcec.Public
func generateSharedEncryptionSecret(pubKey *btcec.PublicKey) (*btcec.PublicKey, *big.Int, error) {
privEph, err := btcec.NewPrivateKey(btcec.S256())
if err != nil {
return nil, nil, errors.Wrapf(err, "generateSharedEncryptionSecretForAES: failed to generate key")
return nil, nil, fmt.Errorf("generateSharedEncryptionSecretForAES: failed to generate key: %w", err)
}
sharedSecret, _ := pubKey.ScalarMult(pubKey.X, pubKey.Y, privEph.D.Bytes())
@ -374,7 +375,7 @@ func decryptWithPrivKey(privKey *btcec.PrivateKey, rawPubEph []byte, ciphertext
plaintext, err := aescbc.DecryptNoPadding(paddedSerializeBigInt(aescbc.KeySize, sharedSecret), iv, ciphertext)
if err != nil {
return nil, errors.Wrapf(err, "decryptWithPrivKey: failed to decrypt")
return nil, fmt.Errorf("decryptWithPrivKey: failed to decrypt: %w", err)
}
return plaintext, nil
@ -385,7 +386,7 @@ func decryptWithPrivKey(privKey *btcec.PrivateKey, rawPubEph []byte, ciphertext
func recoverSharedEncryptionSecret(privKey *btcec.PrivateKey, rawPubEph []byte) (*big.Int, error) {
pubEph, err := btcec.ParsePubKey(rawPubEph, btcec.S256())
if err != nil {
return nil, errors.Wrapf(err, "recoverSharedEncryptionSecretForAES: failed to parse pub eph")
return nil, fmt.Errorf("recoverSharedEncryptionSecretForAES: failed to parse pub eph: %w", err)
}
sharedSecret, _ := pubEph.ScalarMult(pubEph.X, pubEph.Y, privKey.D.Bytes())

22
vendor/github.com/muun/libwallet/errors.go generated vendored Normal file
View File

@ -0,0 +1,22 @@
package libwallet
// Error codes surfaced across the libwallet boundary. Callers compare
// the result of ErrorCode against these values to branch on the kind
// of failure.
const (
	ErrUnknown               = 1
	ErrInvalidURI            = 2
	ErrNetwork               = 3
	ErrInvalidPrivateKey     = 4
	ErrInvalidDerivationPath = 5
	ErrInvalidInvoice        = 6
)

// ErrorCode extracts the numeric code carried by err.
//
// Any error value exposing a `Code() int64` method reports its own
// code; every other value — including a nil error — maps to ErrUnknown.
func ErrorCode(err error) int64 {
	// Structural interface: satisfied by any coded error type, without
	// this package having to import its concrete definition.
	type coder interface {
		Code() int64
	}

	if coded, ok := err.(coder); ok {
		return coded.Code()
	}
	return ErrUnknown
}

28
vendor/github.com/muun/libwallet/errors/errors.go generated vendored Normal file
View File

@ -0,0 +1,28 @@
package errors
import (
"errors"
"fmt"
)
// Error is a coded error: an underlying cause paired with a numeric
// code that can cross API boundaries where typed Go errors are not
// available.
type Error struct {
	err  error // underlying cause, exposed via Error and Unwrap
	code int64 // machine-readable code for this failure
}

// Error returns the message of the underlying error.
func (e *Error) Error() string {
	return e.err.Error()
}

// Code returns the numeric code attached to this error.
func (e *Error) Code() int64 {
	return e.code
}

// Unwrap exposes the underlying cause so that errors.Is and errors.As
// can traverse chains built with Errorf and the %w verb. Without it,
// wrapped sentinel errors would be invisible to callers.
func (e *Error) Unwrap() error {
	return e.err
}

// New builds a coded error with a fixed message.
func New(code int64, msg string) error {
	return &Error{errors.New(msg), code}
}

// Errorf builds a coded error with a formatted message. A %w verb in
// format wraps its operand exactly as fmt.Errorf does, and the result
// remains reachable through Unwrap.
func Errorf(code int64, format string, a ...interface{}) error {
	err := fmt.Errorf(format, a...)
	return &Error{err, code}
}

View File

@ -10,10 +10,14 @@ require (
github.com/lightningnetwork/lightning-onion v1.0.1
github.com/lightningnetwork/lnd v0.10.4-beta
github.com/miekg/dns v1.1.29 // indirect
github.com/pdfcpu/pdfcpu v0.3.8
github.com/pkg/errors v0.9.1
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37
golang.org/x/mobile v0.0.0-20200720140940-1a48f808d81f // indirect
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56 // indirect
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
google.golang.org/protobuf v1.25.0
gopkg.in/gormigrate.v1 v1.6.0
)
// Fork that includes the -cache flag for quicker builds
replace golang.org/x/mobile => github.com/champo/mobile v0.0.0-20201226003606-ef8e5756cda7

View File

@ -24,7 +24,6 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/btcsuite/btcd v0.0.0-20190629003639-c26ffa870fd8/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI=
github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI=
github.com/btcsuite/btcd v0.20.0-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btcd v0.20.1-beta h1:Ik4hyJqN8Jfyv3S4AGBOmyouMsYE3EdYODkMbQjwPGw=
github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btcd v0.20.1-beta.0.20200513120220-b470eee47728/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
@ -38,7 +37,6 @@ github.com/btcsuite/btcutil v1.0.2 h1:9iZ1Terx9fMIOtq1VrwdqfsATL9MC2l8ZrUY6YZ2ut
github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts=
github.com/btcsuite/btcutil/psbt v1.0.2 h1:gCVY3KxdoEVU7Q6TjusPO+GANIwVgr9yTLqM+a6CZr8=
github.com/btcsuite/btcutil/psbt v1.0.2/go.mod h1:LVveMu4VaNSkIRTZu2+ut0HDBRuYjqGocxDMNS1KuGQ=
github.com/btcsuite/btcwallet v0.10.0/go.mod h1:4TqBEuceheGNdeLNrelliLHJzmXauMM2vtWfuy1pFiM=
github.com/btcsuite/btcwallet v0.11.1-0.20200612012534-48addcd5591a h1:AZ1Mf0gd9mgJqrTTIFUc17ep9EKUbQusVAIzJ6X+x3Q=
github.com/btcsuite/btcwallet v0.11.1-0.20200612012534-48addcd5591a/go.mod h1:9+AH3V5mcTtNXTKe+fe63fDLKGOwQbZqmvOVUef+JFE=
github.com/btcsuite/btcwallet/wallet/txauthor v1.0.0 h1:KGHMW5sd7yDdDMkCZ/JpP0KltolFsQcB973brBnfj4c=
@ -48,7 +46,6 @@ github.com/btcsuite/btcwallet/wallet/txrules v1.0.0/go.mod h1:UwQE78yCerZ313EXZw
github.com/btcsuite/btcwallet/wallet/txsizes v1.0.0 h1:6DxkcoMnCPY4E9cUDPB5tbuuf40SmmMkSQkoE8vCT+s=
github.com/btcsuite/btcwallet/wallet/txsizes v1.0.0/go.mod h1:pauEU8UuMFiThe5PB3EO+gO5kx87Me5NvdQDsTuq6cs=
github.com/btcsuite/btcwallet/walletdb v1.0.0/go.mod h1:bZTy9RyYZh9fLnSua+/CD48TJtYJSHjjYcSaszuxCCk=
github.com/btcsuite/btcwallet/walletdb v1.1.0/go.mod h1:bZTy9RyYZh9fLnSua+/CD48TJtYJSHjjYcSaszuxCCk=
github.com/btcsuite/btcwallet/walletdb v1.2.0/go.mod h1:9cwc1Yyg4uvd4ZdfdoMnALji+V9gfWSMfxEdLdR5Vwc=
github.com/btcsuite/btcwallet/walletdb v1.3.1/go.mod h1:9cwc1Yyg4uvd4ZdfdoMnALji+V9gfWSMfxEdLdR5Vwc=
github.com/btcsuite/btcwallet/walletdb v1.3.2/go.mod h1:GZCMPNpUu5KE3ASoVd+k06p/1OW8OwNGCCaNWRto2cQ=
@ -57,7 +54,6 @@ github.com/btcsuite/btcwallet/walletdb v1.3.3/go.mod h1:oJDxAEUHVtnmIIBaa22wSBPT
github.com/btcsuite/btcwallet/wtxmgr v1.0.0/go.mod h1:vc4gBprll6BP0UJ+AIGDaySoc7MdAmZf8kelfNb8CFY=
github.com/btcsuite/btcwallet/wtxmgr v1.2.0 h1:ZUYPsSv8GjF9KK7lboB2OVHF0uYEcHxgrCfFWqPd9NA=
github.com/btcsuite/btcwallet/wtxmgr v1.2.0/go.mod h1:h8hkcKUE3X7lMPzTUoGnNiw5g7VhGrKEW3KpR2r0VnY=
github.com/btcsuite/fastsha256 v0.0.0-20160815193821-637e65642941/go.mod h1:QcFA8DZHtuIAdYKCq/BzELOaznRsCvwf4zTPmaYwaig=
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd h1:R/opQEbFEy9JGkIguV40SvRY1uliPX8ifOvi6ICsFCw=
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg=
github.com/btcsuite/golangcrypto v0.0.0-20150304025918-53f62d9b43e8/go.mod h1:tYvUd8KLhm/oXvUeSEs2VlLghFjQt9+ZaF9ghH0JNjc=
@ -72,6 +68,10 @@ github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtE
github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/champo/mobile v0.0.0-20201225234154-3393de95d3bb h1:Doj1b3qkFX5zakU7uJ1lpsER6GNS4R65Zbfrpz2fIWE=
github.com/champo/mobile v0.0.0-20201225234154-3393de95d3bb/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
github.com/champo/mobile v0.0.0-20201226003606-ef8e5756cda7 h1:jbaq2lXHNbmLj9Ab3upCbYSZ/j/TQ6yzDwie/pNyfqA=
github.com/champo/mobile v0.0.0-20201226003606-ef8e5756cda7/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/coreos/bbolt v1.3.3/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -87,6 +87,7 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
github.com/frankban/quicktest v1.2.2/go.mod h1:Qh/WofXFeiAFII1aEBu529AtJo6Zg2VHscnEsbBnJ20=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
@ -132,9 +133,14 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0 h1:Iju5GlWwrvL6UBg4zJJt3btmonfrMlCDdsejg4CZE7c=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v0.0.0-20170724004829-f2862b476edc/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.8.6 h1:XvND7+MPP7Jp+JpqSZ7naSl5nVZf6k0LbL1V3EKh0zc=
github.com/grpc-ecosystem/grpc-gateway v1.8.6/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/hhrutter/lzw v0.0.0-20190827003112-58b82c5a41cc/go.mod h1:yJBvOcu1wLQ9q9XZmfiPfur+3dQJuIhYQsMGLYcItZk=
github.com/hhrutter/lzw v0.0.0-20190829144645-6f07a24e8650 h1:1yY/RQWNSBjJe2GDCIYoLmpWVidrooriUr4QS/zaATQ=
github.com/hhrutter/lzw v0.0.0-20190829144645-6f07a24e8650/go.mod h1:yJBvOcu1wLQ9q9XZmfiPfur+3dQJuIhYQsMGLYcItZk=
github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7 h1:o1wMw7uTNyA58IlEdDpxIrtFHTgnvYzA8sCQz8luv94=
github.com/hhrutter/tiff v0.0.0-20190829141212-736cae8d0bc7/go.mod h1:WkUxfS2JUu3qPo6tRld7ISb8HiC0gVSU91kooBMDVok=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/jackpal/gateway v1.0.5 h1:qzXWUJfuMdlLMtt0a3Dgt+xkWQiA5itDEITVJtuSwMc=
github.com/jackpal/gateway v1.0.5/go.mod h1:lTpwd4ACLXmpyiCTRtfiNyVnUmqT9RivzCDQetPfnjA=
@ -145,8 +151,6 @@ github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jinzhu/gorm v1.9.2/go.mod h1:Vla75njaFJ8clLU1W44h34PjIkijhjHIYnZxMqCdxqo=
github.com/jinzhu/gorm v1.9.15 h1:OdR1qFvtXktlxk73XFYMiYn9ywzTwytqe4QkuMRqc38=
github.com/jinzhu/gorm v1.9.15/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs=
github.com/jinzhu/gorm v1.9.16 h1:+IyIjPEABKRpsu/F8OvDPy9fyQlgsg2luMV2ZIH5i5o=
github.com/jinzhu/gorm v1.9.16/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs=
github.com/jinzhu/inflection v0.0.0-20180308033659-04140366298a/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
@ -174,25 +178,22 @@ github.com/kkdai/bstream v0.0.0-20181106074824-b3251f7901ec h1:n1NeQ3SgUHyISrjFF
github.com/kkdai/bstream v0.0.0-20181106074824-b3251f7901ec/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.1.1 h1:sJZmqHoEaY7f+NPP8pgLB/WxulyR3fewgCM2qaSlBb4=
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lightninglabs/gozmq v0.0.0-20190710231225-cea2a031735d/go.mod h1:vxmQPeIQxPf6Jf9rM8R+B4rKBqLA2AjttNxkFBL2Plk=
github.com/lightninglabs/gozmq v0.0.0-20191113021534-d20a764486bf h1:HZKvJUHlcXI/f/O0Avg7t8sqkPo78HFzjmeYFl6DPnc=
github.com/lightninglabs/gozmq v0.0.0-20191113021534-d20a764486bf/go.mod h1:vxmQPeIQxPf6Jf9rM8R+B4rKBqLA2AjttNxkFBL2Plk=
github.com/lightninglabs/neutrino v0.10.0/go.mod h1:C3KhCMk1Mcx3j8v0qRVWM1Ow6rIJSvSPnUAq00ZNAfk=
github.com/lightninglabs/neutrino v0.11.0/go.mod h1:CuhF0iuzg9Sp2HO6ZgXgayviFTn1QHdSTJlMncK80wg=
github.com/lightninglabs/neutrino v0.11.1-0.20200316235139-bffc52e8f200 h1:j4iZ1XlUAPQmW6oSzMcJGILYsRHNs+4O3Gk+2Ms5Dww=
github.com/lightninglabs/neutrino v0.11.1-0.20200316235139-bffc52e8f200/go.mod h1:MlZmoKa7CJP3eR1s5yB7Rm5aSyadpKkxqAwLQmog7N0=
github.com/lightninglabs/protobuf-hex-display v1.3.3-0.20191212020323-b444784ce75d/go.mod h1:KDb67YMzoh4eudnzClmvs2FbiLG9vxISmLApUkCa4uI=
github.com/lightningnetwork/lightning-onion v0.0.0-20190909101754-850081b08b6a/go.mod h1:rigfi6Af/KqsF7Za0hOgcyq2PNH4AN70AaMRxcJkff4=
github.com/lightningnetwork/lightning-onion v1.0.1 h1:qChGgS5+aPxFeR6JiUsGvanei1bn6WJpYbvosw/1604=
github.com/lightningnetwork/lightning-onion v1.0.1/go.mod h1:rigfi6Af/KqsF7Za0hOgcyq2PNH4AN70AaMRxcJkff4=
github.com/lightningnetwork/lnd v0.8.0-beta h1:HmmhSRTq48qobqQF8YLqNa8eKU8dDBNbWWpr2VzycJM=
github.com/lightningnetwork/lnd v0.8.0-beta/go.mod h1:nq06y2BDv7vwWeMmwgB7P3pT7/Uj7sGf5FzHISVD6t4=
github.com/lightningnetwork/lnd v0.10.4-beta h1:Af2zOCPePeaU8Tkl8IqtTjr4BP3zYfi+hAtQYcCMM58=
github.com/lightningnetwork/lnd v0.10.4-beta/go.mod h1:4d02pduRVtZwgTJ+EimKJTsEAY0jDwi0SPE9h5aRneM=
github.com/lightningnetwork/lnd/cert v1.0.2 h1:g2rEu+sM2Uyz0bpfuvwri/ks6R/26H5iY1NcGbpDJ+c=
@ -217,13 +218,16 @@ github.com/miekg/dns v0.0.0-20171125082028-79bfde677fa8/go.mod h1:W1PPwlIAgtquWB
github.com/miekg/dns v1.1.29 h1:xHBEhR+t5RzcFJjBLJlax2daXOrTYtr9z4WdKEfWFzg=
github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/muun/libwallet v0.4.0 h1:mqvEA+EpZeyXPOhcm61H8OL3AQxEuvelsm3VqYqIEIY=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/pdfcpu/pdfcpu v0.3.8 h1:wdKii186dzmr/aP/fkJl2s9yT3TZcwc1VqgfabNymGI=
github.com/pdfcpu/pdfcpu v0.3.8/go.mod h1:EfJ1EIo3n5+YlGF53DGe1yF1wQLiqK1eqGDN5LuKALs=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@ -269,23 +273,22 @@ golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 h1:cg5LA/zNPRzIXIWSCxQW10Rvpy94aQh3LT/ShoCpkHw=
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190731235908-ec7cb31e5a56/go.mod h1:JhuoJpWY28nO4Vef9tZUw9qufEGTyX1+7lmHxV5q5G4=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20190823064033-3a9bac650e44/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5 h1:QelT11PB4FXiDEXucrfNckHoFxwt8USGY1ajP1ZF5lM=
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20200720140940-1a48f808d81f h1:I/h48WbtIgA+7yh90BQGaTm4aoyybl/D5N+N6JIfuCI=
golang.org/x/mobile v0.0.0-20200720140940-1a48f808d81f/go.mod h1:skQtrUTUwhdJvXM/2KKJzY8pDgNr9I/FOMqDVRPBUS4=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd h1:ePuNC7PZ6O5BzgPn9bZayERXBdfZjUYoXEf5BTfDfh8=
golang.org/x/mod v0.1.1-0.20191209134235-331c550502dd/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180719180050-a680a1efc54d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -298,7 +301,6 @@ golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190206173232-65e2d4e15006/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@ -309,7 +311,6 @@ golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAG
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -323,14 +324,11 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2 h1:z99zHgr7hKfrUcX/KsoJk5FJfjTceCKIp96+biqP4To=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2 h1:+DCIGbF/swA92ohVg0//6X2IVY3KZs6p9mix0ziNYJM=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@ -352,19 +350,16 @@ google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190201180003-4b09977fb922/go.mod h1:L3J43x8/uS+qIUoksaLKe6OS3nUKxOKuIFz1sl2/jx4=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 h1:+kGHl1aib/qcwaRi1CbqBZ1rk19r85MNUf8HaBghugY=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.18.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.27.0 h1:rRYRFMVgRv6E0D70Skyfsr28tDXIuuPZyWGMPdMcnXg=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0 h1:qdOKuR/EIArgaWNjetjgTzgVTAZ+S/WXVrq9HW9zimw=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
@ -374,22 +369,20 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v1 v1.0.1 h1:oQFRXzZ7CkBGdm1XZm/EbQYaYNNEElNBOd09M6cqNso=
gopkg.in/errgo.v1 v1.0.1/go.mod h1:3NjfXwocQRYAPTq4/fzX+CwUhPRcR/azYRhj8G+LqMo=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gormigrate.v1 v1.6.0 h1:XpYM6RHQPmzwY7Uyu+t+xxMXc86JYFJn4nEc9HzQjsI=
gopkg.in/gormigrate.v1 v1.6.0/go.mod h1:Lf00lQrHqfSYWiTtPcyQabsDdM6ejZaMgV0OU6JMSlw=
gopkg.in/macaroon-bakery.v2 v2.0.1 h1:0N1TlEdfLP4HXNCg7MQUMp5XwvOoxk+oe9Owr2cpvsc=
gopkg.in/macaroon-bakery.v2 v2.0.1/go.mod h1:B4/T17l+ZWGwxFSZQmlBwp25x+og7OkhETfr3S9MbIA=
gopkg.in/macaroon.v2 v2.0.0 h1:LVWycAfeJBUjCIqfR9gqlo7I8vmiXRr51YEOZ1suop8=
gopkg.in/macaroon.v2 v2.0.0/go.mod h1:+I6LnTMkm/uV5ew/0nsulNjL16SK4+C8yDmRUzHR17I=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

View File

@ -2,10 +2,11 @@ package libwallet
import (
"crypto/sha256"
"errors"
"fmt"
"strings"
"github.com/muun/libwallet/hdpath"
"github.com/pkg/errors"
"github.com/btcsuite/btcutil/hdkeychain"
)
@ -91,17 +92,17 @@ func (p *HDPrivateKey) DerivedAt(index int64, hardened bool) (*HDPrivateKey, err
func (p *HDPrivateKey) DeriveTo(path string) (*HDPrivateKey, error) {
if !strings.HasPrefix(path, p.Path) {
return nil, errors.Errorf("derivation path %v is not prefix of the keys path %v", path, p.Path)
return nil, fmt.Errorf("derivation path %v is not prefix of the keys path %v", path, p.Path)
}
firstPath, err := hdpath.Parse(p.Path)
if err != nil {
return nil, errors.Wrapf(err, "couldn't parse derivation path %v", p.Path)
return nil, fmt.Errorf("couldn't parse derivation path %v: %w", p.Path, err)
}
secondPath, err := hdpath.Parse(path)
if err != nil {
return nil, errors.Wrapf(err, "couldn't parse derivation path %v", path)
return nil, fmt.Errorf("couldn't parse derivation path %v: %w", path, err)
}
indexes := secondPath.IndexesFrom(firstPath)
@ -109,7 +110,7 @@ func (p *HDPrivateKey) DeriveTo(path string) (*HDPrivateKey, error) {
for depth, index := range indexes {
derivedKey, err = derivedKey.DerivedAt(int64(index.Index), index.Hardened)
if err != nil {
return nil, errors.Wrapf(err, "failed to derive key at path %v on depth %v", path, depth)
return nil, fmt.Errorf("failed to derive key at path %v on depth %v: %w", path, depth, err)
}
}
// The generated path has no names in it, so replace it

View File

@ -1,11 +1,13 @@
package libwallet
import (
"errors"
"fmt"
"strings"
"github.com/muun/libwallet/hdpath"
"github.com/pkg/errors"
"github.com/btcsuite/btcutil"
"github.com/btcsuite/btcutil/hdkeychain"
)
@ -42,7 +44,7 @@ func (p *HDPublicKey) String() string {
func (p *HDPublicKey) DerivedAt(index int64) (*HDPublicKey, error) {
if index&hdkeychain.HardenedKeyStart != 0 {
return nil, errors.Errorf("can't derive a hardened pub key (index %v)", index)
return nil, fmt.Errorf("can't derive a hardened pub key (index %v)", index)
}
child, err := p.key.Child(uint32(index))
@ -57,29 +59,29 @@ func (p *HDPublicKey) DerivedAt(index int64) (*HDPublicKey, error) {
func (p *HDPublicKey) DeriveTo(path string) (*HDPublicKey, error) {
if !strings.HasPrefix(path, p.Path) {
return nil, errors.Errorf("derivation path %v is not prefix of the keys path %v", path, p.Path)
return nil, fmt.Errorf("derivation path %v is not prefix of the keys path %v", path, p.Path)
}
firstPath, err := hdpath.Parse(p.Path)
if err != nil {
return nil, errors.Wrapf(err, "couldn't parse derivation path %v", p.Path)
return nil, fmt.Errorf("couldn't parse derivation path %v: %w", p.Path, err)
}
secondPath, err := hdpath.Parse(path)
if err != nil {
return nil, errors.Wrapf(err, "couldn't parse derivation path %v", path)
return nil, fmt.Errorf("couldn't parse derivation path %v: %w", path, err)
}
indexes := secondPath.IndexesFrom(firstPath)
derivedKey := p
for depth, index := range indexes {
if index.Hardened {
return nil, errors.Errorf("can't derive a hardened pub key (path %v)", path)
return nil, fmt.Errorf("can't derive a hardened pub key (path %v)", path)
}
derivedKey, err = derivedKey.DerivedAt(int64(index.Index))
if err != nil {
return nil, errors.Wrapf(err, "failed to derive key at path %v on depth %v", path, depth)
return nil, fmt.Errorf("failed to derive key at path %v on depth %v: %w", path, depth, err)
}
}
// The generated path has no names in it, so replace it
@ -98,3 +100,17 @@ func (p *HDPublicKey) Raw() []byte {
return key.SerializeCompressed()
}
// Fingerprint returns the 4-byte fingerprint for this pubkey
func (p *HDPublicKey) Fingerprint() []byte {
key, err := p.key.ECPubKey()
if err != nil {
panic("failed to extract pub key")
}
bytes := key.SerializeCompressed()
hash := btcutil.Hash160(bytes)
return hash[:4]
}

View File

@ -3,6 +3,7 @@ package libwallet
import (
"bytes"
"crypto/sha256"
"errors"
"fmt"
"github.com/btcsuite/btcd/chaincfg"
@ -12,7 +13,6 @@ import (
"github.com/lightningnetwork/lnd/lnwire"
"github.com/muun/libwallet/hdpath"
"github.com/muun/libwallet/sphinx"
"github.com/pkg/errors"
)
type coinIncomingSwap struct {
@ -24,6 +24,7 @@ type coinIncomingSwap struct {
SwapServerPublicKey []byte
ExpirationHeight int64
VerifyOutputAmount bool // used only for fulfilling swaps through IncomingSwap
Collect btcutil.Amount
}
func (c *coinIncomingSwap) SignInput(index int, tx *wire.MsgTx, userKey *HDPrivateKey, muunKey *HDPublicKey) error {
@ -124,13 +125,16 @@ func (c *coinIncomingSwap) SignInput(index int, tx *wire.MsgTx, userKey *HDPriva
// Now check the information we have against the sphinx created by the payer
if len(c.Sphinx) > 0 {
// This incoming swap might be collecting debt, which would be deducted from the outputAmount
// so we add it back up so the amount will match with the sphinx
expectedAmount := outputAmount + lnwire.NewMSatFromSatoshis(c.Collect)
err = sphinx.Validate(
c.Sphinx,
c.PaymentHash256,
secrets.PaymentSecret,
nodeKey,
uint32(c.ExpirationHeight),
outputAmount,
expectedAmount,
c.Network,
)
if err != nil {
@ -175,7 +179,7 @@ func (c *coinIncomingSwap) FullySignInput(index int, tx *wire.MsgTx, userKey, mu
derivedMuunKey, err := muunKey.DeriveTo(secrets.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive muun key")
return fmt.Errorf("failed to derive muun key: %w", err)
}
muunSignature, err := c.signature(index, tx, userKey.PublicKey(), derivedMuunKey.PublicKey(), derivedMuunKey)

View File

@ -4,7 +4,7 @@ import (
"fmt"
"github.com/lightningnetwork/lnd/zpay32"
"github.com/pkg/errors"
"github.com/muun/libwallet/errors"
)
// Invoice is muun's invoice struct
@ -27,11 +27,11 @@ func ParseInvoice(rawInput string, network *Network) (*Invoice, error) {
_, components := buildUriFromString(rawInput, lightningScheme)
if components == nil {
return nil, errors.Errorf("failed to parse uri %v", rawInput)
return nil, errors.Errorf(ErrInvalidInvoice, "failed to parse uri %v", rawInput)
}
if components.Scheme != "lightning" {
return nil, errors.Errorf("invalid scheme %v", components.Scheme)
return nil, errors.Errorf(ErrInvalidInvoice, "invalid scheme %v", components.Scheme)
}
invoice := components.Opaque
@ -44,7 +44,7 @@ func ParseInvoice(rawInput string, network *Network) (*Invoice, error) {
parsedInvoice, err := zpay32.Decode(invoice, network.network)
if err != nil {
return nil, errors.Wrapf(err, "Couldnt parse invoice")
return nil, errors.Errorf(ErrInvalidInvoice, "Couldn't parse invoice: %w", err)
}
var fallbackAdd *MuunPaymentURI
@ -52,7 +52,7 @@ func ParseInvoice(rawInput string, network *Network) (*Invoice, error) {
if parsedInvoice.FallbackAddr != nil {
fallbackAdd, err = GetPaymentURI(parsedInvoice.FallbackAddr.String(), network)
if err != nil {
return nil, errors.Wrapf(err, "Couldnt get address")
return nil, errors.Errorf(ErrInvalidInvoice, "Couldn't get address: %w", err)
}
}

View File

@ -5,6 +5,7 @@ import (
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"path"
"time"
@ -16,9 +17,9 @@ import (
"github.com/lightningnetwork/lnd/lnwire"
"github.com/lightningnetwork/lnd/netann"
"github.com/lightningnetwork/lnd/zpay32"
"github.com/pkg/errors"
"github.com/muun/libwallet/hdpath"
"github.com/muun/libwallet/sphinx"
"github.com/muun/libwallet/walletdb"
)
@ -185,6 +186,9 @@ func CreateInvoice(net *Network, userKey *HDPrivateKey, routeHints *RouteHints,
if err != nil {
return "", err
}
if dbInvoice == nil {
return "", nil
}
var paymentHash [32]byte
copy(paymentHash[:], dbInvoice.PaymentHash)
@ -268,6 +272,76 @@ func CreateInvoice(net *Network, userKey *HDPrivateKey, routeHints *RouteHints,
return bech32, nil
}
// ExposePreimage gives the preimage matching a payment hash if we have it
func ExposePreimage(paymentHash []byte) ([]byte, error) {
if len(paymentHash) != 32 {
return nil, fmt.Errorf("ExposePreimage: received invalid hash len %v", len(paymentHash))
}
// Lookup invoice data matching this HTLC using the payment hash
db, err := openDB()
if err != nil {
return nil, err
}
defer db.Close()
secrets, err := db.FindByPaymentHash(paymentHash)
if err != nil {
return nil, fmt.Errorf("could not find invoice data for payment hash: %w", err)
}
return secrets.Preimage, nil
}
func IsInvoiceFulfillable(paymentHash, onionBlob []byte, amount int64, userKey *HDPrivateKey, net *Network) error {
if len(paymentHash) != 32 {
return fmt.Errorf("IsInvoiceFulfillable: received invalid hash len %v", len(paymentHash))
}
// Lookup invoice data matching this HTLC using the payment hash
db, err := openDB()
if err != nil {
return err
}
defer db.Close()
secrets, err := db.FindByPaymentHash(paymentHash)
if err != nil {
return fmt.Errorf("IsInvoiceFulfillable: could not find invoice data for payment hash: %w", err)
}
if len(onionBlob) == 0 {
return nil
}
identityKeyPath := hdpath.MustParse(secrets.KeyPath).Child(identityKeyChildIndex)
nodeHDKey, err := userKey.DeriveTo(identityKeyPath.String())
if err != nil {
return fmt.Errorf("IsInvoiceFulfillable: failed to derive key: %w", err)
}
nodeKey, err := nodeHDKey.key.ECPrivKey()
if err != nil {
return fmt.Errorf("IsInvoiceFulfillable: failed to get priv key: %w", err)
}
err = sphinx.Validate(
onionBlob,
paymentHash,
secrets.PaymentSecret,
nodeKey,
0, // This is used internally by the sphinx decoder but it's not needed
lnwire.MilliSatoshi(uint64(amount)*1000),
net.network,
)
if err != nil {
return fmt.Errorf("IsInvoiceFuflillable: invalid sphinx: %w", err)
}
return nil
}
type IncomingSwap struct {
FulfillmentTx []byte
MuunSignature []byte
@ -282,6 +356,7 @@ type IncomingSwap struct {
HtlcExpiration int64
HtlcBlock []byte // unused
ConfirmationTarget int64 // to validate fee rate, unused for now
CollectInSats int64
}
func (s *IncomingSwap) VerifyAndFulfill(userKey *HDPrivateKey, muunKey *HDPublicKey, net *Network) ([]byte, error) {
@ -313,6 +388,7 @@ func (s *IncomingSwap) VerifyAndFulfill(userKey *HDPrivateKey, muunKey *HDPublic
SwapServerPublicKey: swapServerPublicKey,
ExpirationHeight: s.HtlcExpiration,
VerifyOutputAmount: true,
Collect: btcutil.Amount(s.CollectInSats),
}
err = coin.SignInput(0, &tx, userKey, muunKey)
if err != nil {

View File

@ -3,6 +3,8 @@ package libwallet
import (
"bytes"
"encoding/hex"
"errors"
"fmt"
"github.com/muun/libwallet/addresses"
@ -11,7 +13,6 @@ import (
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/pkg/errors"
)
type SigningExpectations struct {
@ -64,6 +65,7 @@ type InputIncomingSwap interface {
PaymentHash256() []byte
SwapServerPublicKey() string
ExpirationHeight() int64
CollectInSats() int64
}
type Input interface {
@ -105,7 +107,7 @@ func NewPartiallySignedTransaction(inputs *InputList, rawTx []byte) (*PartiallyS
tx := wire.NewMsgTx(0)
err := tx.Deserialize(bytes.NewReader(rawTx))
if err != nil {
return nil, errors.Wrapf(err, "failed to decode tx")
return nil, fmt.Errorf("failed to decode tx: %w", err)
}
return &PartiallySignedTransaction{tx: tx, inputs: inputs.Inputs()}, nil
@ -127,13 +129,13 @@ func (p *PartiallySignedTransaction) Sign(userKey *HDPrivateKey, muunKey *HDPubl
coins, err := p.coins(userKey.Network)
if err != nil {
return nil, errors.Wrapf(err, "could not convert input data to coin")
return nil, fmt.Errorf("could not convert input data to coin: %w", err)
}
for i, coin := range coins {
err = coin.SignInput(i, p.tx, userKey, muunKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to sign input")
return nil, fmt.Errorf("failed to sign input: %w", err)
}
}
@ -145,13 +147,13 @@ func (p *PartiallySignedTransaction) FullySign(userKey, muunKey *HDPrivateKey) (
coins, err := p.coins(userKey.Network)
if err != nil {
return nil, errors.Wrapf(err, "could not convert input data to coin")
return nil, fmt.Errorf("could not convert input data to coin: %w", err)
}
for i, coin := range coins {
err = coin.FullySignInput(i, p.tx, userKey, muunKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to sign input")
return nil, fmt.Errorf("failed to sign input: %w", err)
}
}
@ -168,11 +170,11 @@ func (p *PartiallySignedTransaction) Verify(expectations *SigningExpectations, u
// If we were to receive more than that, we consider it invalid.
if expectations.change != nil {
if len(p.tx.TxOut) != 2 {
return errors.Errorf("expected destination and change outputs but found %v", len(p.tx.TxOut))
return fmt.Errorf("expected destination and change outputs but found %v", len(p.tx.TxOut))
}
} else {
if len(p.tx.TxOut) != 1 {
return errors.Errorf("expected destination output only but found %v", len(p.tx.TxOut))
return fmt.Errorf("expected destination output only but found %v", len(p.tx.TxOut))
}
}
@ -207,12 +209,12 @@ func (p *PartiallySignedTransaction) Verify(expectations *SigningExpectations, u
// Fail if not destination output was found in the TX.
if toOutput == nil {
return errors.Errorf("destination output is not present")
return errors.New("destination output is not present")
}
// Verify destination output value matches expected amount
if toOutput.Value != expectedAmount {
return errors.Errorf("destination amount is mismatched. found %v expected %v", toOutput.Value, expectedAmount)
return fmt.Errorf("destination amount is mismatched. found %v expected %v", toOutput.Value, expectedAmount)
}
/*
@ -237,25 +239,25 @@ func (p *PartiallySignedTransaction) Verify(expectations *SigningExpectations, u
// Verify change output is spendable by the wallet.
if expectedChange != nil {
if changeOutput == nil {
return errors.Errorf("Change is not present")
return errors.New("change is not present")
}
expectedChangeAmount := actualTotal - expectedAmount - expectedFee
if changeOutput.Value != expectedChangeAmount {
return errors.Errorf("Change amount is mismatched. found %v expected %v",
return fmt.Errorf("change amount is mismatched. found %v expected %v",
changeOutput.Value, expectedChangeAmount)
}
derivedUserKey, err := userPublicKey.DeriveTo(expectedChange.DerivationPath())
if err != nil {
return errors.Wrapf(err, "failed to derive user key to change path %v",
expectedChange.DerivationPath())
return fmt.Errorf("failed to derive user key to change path %v: %w",
expectedChange.DerivationPath(), err)
}
derivedMuunKey, err := muunPublickKey.DeriveTo(expectedChange.DerivationPath())
if err != nil {
return errors.Wrapf(err, "failed to derive muun key to change path %v",
expectedChange.DerivationPath())
return fmt.Errorf("failed to derive muun key to change path %v: %w",
expectedChange.DerivationPath(), err)
}
expectedChangeAddress, err := addresses.Create(
@ -266,24 +268,24 @@ func (p *PartiallySignedTransaction) Verify(expectations *SigningExpectations, u
network.network,
)
if err != nil {
return errors.Wrapf(err, "failed to build the change address with version %v",
expectedChange.Version())
return fmt.Errorf("failed to build the change address with version %v: %w",
expectedChange.Version(), err)
}
if expectedChangeAddress.Address() != expectedChange.Address() {
return errors.Errorf("mismatched change address. found %v, expected %v",
return fmt.Errorf("mismatched change address. found %v, expected %v",
expectedChange.Address(), expectedChangeAddress.Address())
}
actualFee := actualTotal - expectedAmount - expectedChangeAmount
if actualFee != expectedFee {
return errors.Errorf("fee mismatched. found %v, expected %v", actualFee, expectedFee)
return fmt.Errorf("fee mismatched. found %v, expected %v", actualFee, expectedFee)
}
} else {
actualFee := actualTotal - expectedAmount
if actualFee >= expectedFee+dustThreshold {
return errors.Errorf("change output is too big to be burned as fee")
return errors.New("change output is too big to be burned as fee")
}
}
@ -300,11 +302,11 @@ func (p *PartiallySignedTransaction) Verify(expectations *SigningExpectations, u
func addressToScript(address string, network *Network) ([]byte, error) {
parsedAddress, err := btcutil.DecodeAddress(address, network.network)
if err != nil {
return nil, errors.Wrapf(err, "failed to parse address %v", address)
return nil, fmt.Errorf("failed to parse address %v: %w", address, err)
}
script, err := txscript.PayToAddrScript(parsedAddress)
if err != nil {
return nil, errors.Wrapf(err, "failed to generate script for address %v", address)
return nil, fmt.Errorf("failed to generate script for address %v: %w", address, err)
}
return script, nil
}
@ -313,7 +315,7 @@ func newTransaction(tx *wire.MsgTx) (*Transaction, error) {
var buf bytes.Buffer
err := tx.Serialize(&buf)
if err != nil {
return nil, errors.Wrapf(err, "failed to encode tx")
return nil, fmt.Errorf("failed to encode tx: %w", err)
}
return &Transaction{
@ -422,8 +424,9 @@ func createCoin(input Input, network *Network) (coin, error) {
PaymentHash256: swap.PaymentHash256(),
SwapServerPublicKey: swapServerPublicKey,
ExpirationHeight: swap.ExpirationHeight(),
Collect: btcutil.Amount(swap.CollectInSats()),
}, nil
default:
return nil, errors.Errorf("can't create coin from input version %v", version)
return nil, fmt.Errorf("can't create coin from input version %v", version)
}
}

View File

@ -1,8 +1,9 @@
package libwallet
import (
"fmt"
"github.com/btcsuite/btcd/btcec"
"github.com/pkg/errors"
)
type PublicKey struct {
@ -12,7 +13,7 @@ type PublicKey struct {
func NewPublicKeyFromBytes(bytes []byte) (*PublicKey, error) {
key, err := btcec.ParsePubKey(bytes, btcec.S256())
if err != nil {
return nil, errors.Wrapf(err, "NewPublicKeyFromBytes: failed to parse pub key")
return nil, fmt.Errorf("NewPublicKeyFromBytes: failed to parse pub key: %w", err)
}
return &PublicKey{key}, nil

View File

@ -4,6 +4,7 @@ import (
"crypto/hmac"
"crypto/rand"
"crypto/sha256"
"encoding/hex"
"errors"
fmt "fmt"
"strings"
@ -89,9 +90,14 @@ func ConvertToKey(code, salt string) (*btcec.PrivateKey, error) {
switch version {
case 1:
saltBytes, err := hex.DecodeString(salt)
if err != nil {
return nil, fmt.Errorf("failed to decode salt: %w", err)
}
input, err = scrypt.Key(
[]byte(code),
[]byte(salt),
saltBytes,
kdfIterations,
kdfBlockSize,
kdfParallelizationFactor,

View File

@ -2,24 +2,24 @@ package libwallet
import (
"crypto/sha256"
"fmt"
"github.com/btcsuite/btcd/txscript"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/pkg/errors"
)
func signNativeSegwitInput(index int, tx *wire.MsgTx, privateKey *HDPrivateKey, witnessScript []byte, amount btcutil.Amount) ([]byte, error) {
privKey, err := privateKey.key.ECPrivKey()
if err != nil {
return nil, errors.Wrapf(err, "failed to produce EC priv key for signing")
return nil, fmt.Errorf("failed to produce EC priv key for signing: %w", err)
}
sigHashes := txscript.NewTxSigHashes(tx)
sig, err := txscript.RawTxInWitnessSignature(tx, sigHashes, index, int64(amount), witnessScript, txscript.SigHashAll, privKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to sign V4 input")
return nil, fmt.Errorf("failed to sign V4 input: %w", err)
}
return sig, nil
@ -44,20 +44,20 @@ func signNonNativeSegwitInput(index int, tx *wire.MsgTx, privateKey *HDPrivateKe
builder.AddData(redeemScript)
script, err := builder.Script()
if err != nil {
return nil, errors.Wrapf(err, "failed to generate signing script")
return nil, fmt.Errorf("failed to generate signing script: %w", err)
}
txInput.SignatureScript = script
privKey, err := privateKey.key.ECPrivKey()
if err != nil {
return nil, errors.Wrapf(err, "failed to produce EC priv key for signing")
return nil, fmt.Errorf("failed to produce EC priv key for signing: %w", err)
}
sigHashes := txscript.NewTxSigHashes(tx)
sig, err := txscript.RawTxInWitnessSignature(
tx, sigHashes, index, int64(amount), witnessScript, txscript.SigHashAll, privKey)
if err != nil {
return nil, errors.Wrapf(err, "failed to sign V3 input")
return nil, fmt.Errorf("failed to sign V3 input: %w", err)
}
return sig, nil

View File

@ -44,15 +44,22 @@ func Validate(
// Validate payment secret if it exists
if payload.MPP != nil {
paymentAddr := payload.MPP.PaymentAddr()
amountToForward := payload.ForwardingInfo().AmountToForward
total := payload.MultiPath().TotalMsat()
if !bytes.Equal(paymentAddr[:], paymentSecret) {
return errors.New("sphinx payment secret does not match")
}
if amount != 0 && payload.ForwardingInfo().AmountToForward > amount {
if amount != 0 && amountToForward > amount {
return fmt.Errorf(
"sphinx payment amount does not match (%v != %v)", amount, payload.ForwardingInfo().AmountToForward,
"sphinx payment amount does not match (%v != %v)", amount, amountToForward,
)
}
if amountToForward < total {
return fmt.Errorf("payment is multipart. forwarded amt = %v, total amt = %v", amountToForward, total)
}
}
return nil
}

View File

@ -1,11 +1,13 @@
package libwallet
import (
"errors"
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet/swaps"
"github.com/pkg/errors"
)
type coinSubmarineSwapV1 struct {
@ -24,7 +26,7 @@ func (c *coinSubmarineSwapV1) SignInput(index int, tx *wire.MsgTx, userKey *HDPr
userKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
witnessScript, err := swaps.CreateWitnessScriptSubmarineSwapV1(
@ -40,7 +42,7 @@ func (c *coinSubmarineSwapV1) SignInput(index int, tx *wire.MsgTx, userKey *HDPr
redeemScript, err := createNonNativeSegwitRedeemScript(witnessScript)
if err != nil {
return errors.Wrapf(err, "failed to build reedem script for signing")
return fmt.Errorf("failed to build reedem script for signing: %w", err)
}
sig, err := signNonNativeSegwitInput(

View File

@ -1,11 +1,13 @@
package libwallet
import (
"errors"
"fmt"
"github.com/btcsuite/btcd/chaincfg"
"github.com/btcsuite/btcd/wire"
"github.com/btcsuite/btcutil"
"github.com/muun/libwallet/swaps"
"github.com/pkg/errors"
)
type coinSubmarineSwapV2 struct {
@ -26,11 +28,11 @@ func (c *coinSubmarineSwapV2) SignInput(index int, tx *wire.MsgTx, userKey *HDPr
userKey, err := userKey.DeriveTo(c.KeyPath)
if err != nil {
return errors.Wrapf(err, "failed to derive user key")
return fmt.Errorf("failed to derive user key: %w", err)
}
if len(c.ServerSignature) == 0 {
return errors.Errorf("Swap server must provide signature")
return errors.New("swap server must provide signature")
}
witnessScript, err := swaps.CreateWitnessScriptSubmarineSwapV2(

View File

@ -11,7 +11,6 @@ import (
"github.com/btcsuite/btcutil"
"github.com/btcsuite/btcutil/hdkeychain"
"github.com/lightningnetwork/lnd/zpay32"
"github.com/pkg/errors"
)
func (swap *SubmarineSwap) validateV2(rawInvoice string, userPublicKey, muunPublicKey *KeyDescriptor, originalExpirationInBlocks int64, network *chaincfg.Params) error {
@ -98,7 +97,7 @@ func (swap *SubmarineSwap) validateV2(rawInvoice string, userPublicKey, muunPubl
if len(swap.PreimageInHex) > 0 {
preimage, err := hex.DecodeString(swap.PreimageInHex)
if err != nil {
return errors.Wrapf(err, "preimagehex is not actually hex 🤔")
return fmt.Errorf("preimageInHex is not valid hex: %w", err)
}
calculatedPaymentHash := sha256.Sum256(preimage)

View File

@ -1,6 +1,7 @@
package walletdb
import (
"errors"
"log"
"time"
@ -45,7 +46,10 @@ func Open(path string) (*DB, error) {
}
func migrate(db *gorm.DB) error {
m := gormigrate.New(db, gormigrate.DefaultOptions, []*gormigrate.Migration{
opts := gormigrate.Options{
UseTransaction: true,
}
m := gormigrate.New(db, &opts, []*gormigrate.Migration{
{
ID: "initial",
Migrate: func(tx *gorm.DB) error {
@ -59,7 +63,14 @@ func migrate(db *gorm.DB) error {
State string
UsedAt *time.Time
}
return tx.CreateTable(&Invoice{}).Error
// This guard exists because at some point migrations were run outside a
// transactional context and a user experimented problems with an invoices
// table that was already created but whose migration had not been properly
// recorded.
if !tx.HasTable(&Invoice{}) {
return tx.CreateTable(&Invoice{}).Error
}
return nil
},
Rollback: func(tx *gorm.DB) error {
return tx.DropTable("invoices").Error
@ -90,6 +101,11 @@ func (d *DB) SaveInvoice(invoice *Invoice) error {
func (d *DB) FindFirstUnusedInvoice() (*Invoice, error) {
var invoice Invoice
if res := d.db.Where(&Invoice{State: InvoiceStateRegistered}).First(&invoice); res.Error != nil {
if errors.Is(res.Error, gorm.ErrRecordNotFound) {
return nil, nil
}
return nil, res.Error
}
invoice.ShortChanId = invoice.ShortChanId | (1 << 63)

202
vendor/github.com/pdfcpu/pdfcpu/LICENSE.txt generated vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,7 @@
// generated by "go run gen.go". DO NOT EDIT.
package config
// ConfigFileBytes is a byteslice representing config.yml.
var ConfigFileBytes = []byte{
35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 10, 35, 32, 68, 101, 102, 97, 117, 108, 116, 32, 99, 111, 110, 102, 105, 103, 117, 114, 97, 116, 105, 111, 110, 32, 35, 10, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 10, 10, 114, 101, 97, 100, 101, 114, 49, 53, 58, 32, 116, 114, 117, 101, 10, 100, 101, 99, 111, 100, 101, 65, 108, 108, 83, 116, 114, 101, 97, 109, 115, 58, 32, 102, 97, 108, 115, 101, 10, 10, 35, 32, 118, 97, 108, 105, 100, 97, 116, 105, 111, 110, 77, 111, 100, 101, 58, 32, 10, 35, 32, 86, 97, 108, 105, 100, 97, 116, 105, 111, 110, 83, 116, 114, 105, 99, 116, 44, 10, 35, 32, 86, 97, 108, 105, 100, 97, 116, 105, 111, 110, 82, 101, 108, 97, 120, 101, 100, 44, 10, 35, 32, 86, 97, 108, 105, 100, 97, 116, 105, 111, 110, 78, 111, 110, 101, 10, 118, 97, 108, 105, 100, 97, 116, 105, 111, 110, 77, 111, 100, 101, 58, 32, 86, 97, 108, 105, 100, 97, 116, 105, 111, 110, 82, 101, 108, 97, 120, 101, 100, 10, 10, 35, 32, 101, 111, 108, 32, 102, 111, 114, 32, 119, 114, 105, 116, 105, 110, 103, 58, 10, 35, 32, 69, 111, 108, 76, 70, 10, 35, 32, 69, 111, 108, 67, 82, 10, 35, 32, 69, 111, 108, 67, 82, 76, 70, 10, 101, 111, 108, 58, 32, 69, 111, 108, 76, 70, 10, 10, 119, 114, 105, 116, 101, 79, 98, 106, 101, 99, 116, 83, 116, 114, 101, 97, 109, 58, 32, 116, 114, 117, 101, 10, 119, 114, 105, 116, 101, 88, 82, 101, 102, 83, 116, 114, 101, 97, 109, 58, 32, 116, 114, 117, 101, 10, 101, 110, 99, 114, 121, 112, 116, 85, 115, 105, 110, 103, 65, 69, 83, 58, 32, 116, 114, 117, 101, 10, 10, 35, 32, 101, 110, 99, 114, 121, 112, 116, 75, 101, 121, 76, 101, 110, 103, 116, 104, 58, 32, 109, 97, 120, 32, 50, 53, 54, 32, 10, 101, 110, 99, 114, 121, 112, 116, 75, 101, 121, 76, 101, 110, 103, 116, 104, 58, 32, 50, 53, 54, 10, 10, 35, 32, 112, 101, 114, 109, 105, 115, 115, 105, 111, 110, 115, 32, 102, 111, 114, 32, 101, 110, 99, 114, 121, 112, 116, 101, 100, 32, 102, 105, 108, 101, 115, 
58, 32, 10, 35, 32, 45, 51, 57, 48, 49, 32, 61, 32, 48, 120, 70, 48, 67, 51, 32, 40, 80, 101, 114, 109, 105, 115, 115, 105, 111, 110, 115, 78, 111, 110, 101, 41, 10, 35, 32, 32, 32, 32, 45, 49, 32, 61, 32, 48, 120, 70, 70, 70, 70, 32, 40, 80, 101, 114, 109, 105, 115, 115, 105, 111, 110, 115, 65, 108, 108, 41, 10, 112, 101, 114, 109, 105, 115, 115, 105, 111, 110, 115, 58, 32, 45, 51, 57, 48, 49, 10, 10, 35, 32, 100, 105, 115, 112, 108, 97, 121, 85, 110, 105, 116, 58, 10, 35, 32, 112, 111, 105, 110, 116, 115, 10, 35, 32, 105, 110, 99, 104, 101, 115, 10, 35, 32, 99, 109, 10, 35, 32, 109, 109, 10, 117, 110, 105, 116, 58, 32, 112, 111, 105, 110, 116, 115}

View File

@ -0,0 +1,37 @@
#########################
# Default configuration #
#########################
reader15: true
decodeAllStreams: false
# validationMode:
# ValidationStrict,
# ValidationRelaxed,
# ValidationNone
validationMode: ValidationRelaxed
# eol for writing:
# EolLF
# EolCR
# EolCRLF
eol: EolLF
writeObjectStream: true
writeXRefStream: true
encryptUsingAES: true
# encryptKeyLength: max 256
encryptKeyLength: 256
# permissions for encrypted files:
# -3901 = 0xF0C3 (PermissionsNone)
# -1 = 0xFFFF (PermissionsAll)
permissions: -3901
# displayUnit:
# points
# inches
# cm
# mm
unit: points

View File

@ -0,0 +1,55 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package metrics provides font metrics for the PDF standard fonts.
package metrics
// The PostScript names of the 14 Type 1 fonts, aka the PDF core font set, are as follows:
//
// Times-Roman,
// Helvetica,
// Courier,
// Symbol,
// Times-Bold,
// Helvetica-Bold,
// Courier-Bold,
// ZapfDingbats,
// Times-Italic,
// Helvetica-Oblique,
// Courier-Oblique,
// Times-BoldItalic,
// Helvetica-BoldOblique,
// Courier-BoldOblique
// CoreFontCharWidth returns the character width for fontName and c in glyph space units.
func CoreFontCharWidth(fontName string, c int) int {
var m map[int]string
switch fontName {
case "Symbol":
m = SymbolGlyphMap
case "ZapfDingbats":
m = ZapfDingbatsGlyphMap
default:
m = WinAnsiGlyphMap
}
glyphName := m[c]
fm := CoreFontMetrics[fontName]
w, ok := fm.W[glyphName]
if !ok {
w = 1000 //m.W["bullet"]
}
return w
}

View File

@ -0,0 +1,680 @@
// generated by "go run gen.go". DO NOT EDIT.
package metrics
import (
"github.com/pdfcpu/pdfcpu/pkg/types"
)
// WinAnsiGlyphMap is a glyph lookup table for CP1252 character codes.
// See Annex D.2 Latin Character Set and Encodings.
var WinAnsiGlyphMap = map[int]string{
32: "space", // U+0020 ' '
33: "exclam", // U+0021 '!'
34: "quotedbl", // U+0022 '"'
35: "numbersign", // U+0023 '#'
36: "dollar", // U+0024 '$'
37: "percent", // U+0025 '%'
38: "ampersand", // U+0026 '&'
39: "quotesingle", // U+0027 '''
40: "parenleft", // U+0028 '('
41: "parenright", // U+0029 ')'
42: "asterisk", // U+002A '*'
43: "plus", // U+002B '+'
44: "comma", // U+002C ','
45: "hyphen", // U+002D '-'
46: "period", // U+002E '.'
47: "slash", // U+002F '/'
48: "zero", // U+0030 '0'
49: "one", // U+0031 '1'
50: "two", // U+0032 '2'
51: "three", // U+0033 '3'
52: "four", // U+0034 '4'
53: "five", // U+0035 '5'
54: "six", // U+0036 '6'
55: "seven", // U+0037 '7'
56: "eight", // U+0038 '8'
57: "nine", // U+0039 '9'
58: "colon", // U+003A ':'
59: "semicolon", // U+003B ';'
60: "less", // U+003C '<'
61: "equal", // U+003D '='
62: "greater", // U+003E '>'
63: "question", // U+003F '?'
64: "at", // U+0040 '@'
65: "A", // U+0041 'A'
66: "B", // U+0042 'B'
67: "C", // U+0043 'C'
68: "D", // U+0044 'D'
69: "E", // U+0045 'E'
70: "F", // U+0046 'F'
71: "G", // U+0047 'G'
72: "H", // U+0048 'H'
73: "I", // U+0049 'I'
74: "J", // U+004A 'J'
75: "K", // U+004B 'K'
76: "L", // U+004C 'L'
77: "M", // U+004D 'M'
78: "N", // U+004E 'N'
79: "O", // U+004F 'O'
80: "P", // U+0050 'P'
81: "Q", // U+0051 'Q'
82: "R", // U+0052 'R'
83: "S", // U+0053 'S'
84: "T", // U+0054 'T'
85: "U", // U+0055 'U'
86: "V", // U+0056 'V'
87: "W", // U+0057 'W'
88: "X", // U+0058 'X'
89: "Y", // U+0059 'Y'
90: "Z", // U+005A 'Z'
91: "bracketleft", // U+005B '['
92: "backslash", // U+005C '\'
93: "bracketright", // U+005D ']'
94: "asciicircum", // U+005E '^'
95: "underscore", // U+005F '_'
96: "grave", // U+0060 '`'
97: "a", // U+0061 'a'
98: "b", // U+0062 'b'
99: "c", // U+0063 'c'
100: "d", // U+0064 'd'
101: "e", // U+0065 'e'
102: "f", // U+0066 'f'
103: "g", // U+0067 'g'
104: "h", // U+0068 'h'
105: "i", // U+0069 'i'
106: "j", // U+006A 'j'
107: "k", // U+006B 'k'
108: "l", // U+006C 'l'
109: "m", // U+006D 'm'
110: "n", // U+006E 'n'
111: "o", // U+006F 'o'
112: "p", // U+0070 'p'
113: "q", // U+0071 'q'
114: "r", // U+0072 'r'
115: "s", // U+0073 's'
116: "t", // U+0074 't'
117: "u", // U+0075 'u'
118: "v", // U+0076 'v'
119: "w", // U+0077 'w'
120: "x", // U+0078 'x'
121: "y", // U+0079 'y'
122: "z", // U+007A 'z'
123: "braceleft", // U+007B '{'
124: "bar", // U+007C '|'
125: "braceright", // U+007D '}'
126: "asciitilde", // U+007E '~'
128: "Euro", // U+0080
130: "quotesinglbase", // U+0082
131: "florin", // U+0083
132: "quotedblbase", // U+0084
133: "ellipsis", // U+0085
134: "dagger", // U+0086
135: "daggerdbl", // U+0087
136: "circumflex", // U+0088
137: "perthousand", // U+0089
138: "Scaron", // U+008A
139: "guilsinglleft", // U+008B
140: "OE", // U+008C
142: "Zcaron", // U+008E
145: "quoteleft", // U+0091
146: "quoteright", // U+0092
147: "quotedblleft", // U+0093
148: "quotedblright", // U+0094
149: "bullet", // U+0095
150: "endash", // U+0096
151: "emdash", // U+0097
152: "tilde", // U+0098
153: "trademark", // U+0099
154: "scaron", // U+009A
155: "guilsinglright", // U+009B
156: "oe", // U+009C
158: "zcaron", // U+009E
159: "Ydieresis", // U+009F
161: "exclamdown", // U+00A1 '¡'
162: "cent", // U+00A2 '¢'
163: "sterling", // U+00A3 '£'
164: "currency", // U+00A4 '¤'
165: "yen", // U+00A5 '¥'
166: "brokenbar", // U+00A6 '¦'
167: "section", // U+00A7 '§'
168: "dieresis", // U+00A8 '¨'
169: "copyright", // U+00A9 '©'
170: "ordfeminine", // U+00AA 'ª'
171: "guillemotleft", // U+00AB '«'
172: "logicalnot", // U+00AC '¬'
174: "registered", // U+00AE '®'
175: "macron", // U+00AF '¯'
176: "degree", // U+00B0 '°'
177: "plusminus", // U+00B1 '±'
178: "twosuperior", // U+00B2 '²'
179: "threesuperior", // U+00B3 '³'
180: "acute", // U+00B4 '´'
181: "mu", // U+00B5 'µ'
182: "paragraph", // U+00B6 '¶'
183: "periodcentered", // U+00B7 '·'
184: "cedilla", // U+00B8 '¸'
185: "onesuperior", // U+00B9 '¹'
186: "ordmasculine", // U+00BA 'º'
187: "guillemotright", // U+00BB '»'
188: "onequarter", // U+00BC '¼'
189: "onehalf", // U+00BD '½'
190: "threequarters", // U+00BE '¾'
191: "questiondown", // U+00BF '¿'
192: "Agrave", // U+00C0 'À'
193: "Aacute", // U+00C1 'Á'
194: "Acircumflex", // U+00C2 'Â'
195: "Atilde", // U+00C3 'Ã'
196: "Adieresis", // U+00C4 'Ä'
197: "Aring", // U+00C5 'Å'
198: "AE", // U+00C6 'Æ'
199: "Ccedilla", // U+00C7 'Ç'
200: "Egrave", // U+00C8 'È'
201: "Eacute", // U+00C9 'É'
202: "Ecircumflex", // U+00CA 'Ê'
203: "Edieresis", // U+00CB 'Ë'
204: "Igrave", // U+00CC 'Ì'
205: "Iacute", // U+00CD 'Í'
206: "Icircumflex", // U+00CE 'Î'
207: "Idieresis", // U+00CF 'Ï'
208: "Eth", // U+00D0 'Ð'
209: "Ntilde", // U+00D1 'Ñ'
210: "Ograve", // U+00D2 'Ò'
211: "Oacute", // U+00D3 'Ó'
212: "Ocircumflex", // U+00D4 'Ô'
213: "Otilde", // U+00D5 'Õ'
214: "Odieresis", // U+00D6 'Ö'
215: "multiply", // U+00D7 '×'
216: "Oslash", // U+00D8 'Ø'
217: "Ugrave", // U+00D9 'Ù'
218: "Uacute", // U+00DA 'Ú'
219: "Ucircumflex", // U+00DB 'Û'
220: "Udieresis", // U+00DC 'Ü'
221: "Yacute", // U+00DD 'Ý'
222: "Thorn", // U+00DE 'Þ'
223: "germandbls", // U+00DF 'ß'
224: "agrave", // U+00E0 'à'
225: "aacute", // U+00E1 'á'
226: "acircumflex", // U+00E2 'â'
227: "atilde", // U+00E3 'ã'
228: "adieresis", // U+00E4 'ä'
229: "aring", // U+00E5 'å'
230: "ae", // U+00E6 'æ'
231: "ccedilla", // U+00E7 'ç'
232: "egrave", // U+00E8 'è'
233: "eacute", // U+00E9 'é'
234: "ecircumflex", // U+00EA 'ê'
235: "edieresis", // U+00EB 'ë'
236: "igrave", // U+00EC 'ì'
237: "iacute", // U+00ED 'í'
238: "icircumflex", // U+00EE 'î'
239: "idieresis", // U+00EF 'ï'
240: "eth", // U+00F0 'ð'
241: "ntilde", // U+00F1 'ñ'
242: "ograve", // U+00F2 'ò'
243: "oacute", // U+00F3 'ó'
244: "ocircumflex", // U+00F4 'ô'
245: "otilde", // U+00F5 'õ'
246: "odieresis", // U+00F6 'ö'
247: "divide", // U+00F7 '÷'
248: "oslash", // U+00F8 'ø'
249: "ugrave", // U+00F9 'ù'
250: "uacute", // U+00FA 'ú'
251: "ucircumflex", // U+00FB 'û'
252: "udieresis", // U+00FC 'ü'
253: "yacute", // U+00FD 'ý'
254: "thorn", // U+00FE 'þ'
255: "ydieresis", // U+00FF 'ÿ'
}
// SymbolGlyphMap is a glyph lookup table for Symbol character codes.
// See Annex D.5 Symbol Set and Encoding.
var SymbolGlyphMap = map[int]string{
32: "space", // U+0020 ' '
33: "exclam", // U+0021 '!'
34: "universal", // U+0022 '"'
35: "numbersign", // U+0023 '#'
36: "existential", // U+0024 '$'
37: "percent", // U+0025 '%'
38: "ampersand", // U+0026 '&'
39: "suchthat", // U+0027 '''
40: "parenleft", // U+0028 '('
41: "parenright", // U+0029 ')'
42: "asteriskmath", // U+002A '*'
43: "plus", // U+002B '+'
44: "comma", // U+002C ','
45: "minus", // U+002D '-'
46: "period", // U+002E '.'
47: "slash", // U+002F '/'
48: "zero", // U+0030 '0'
49: "one", // U+0031 '1'
50: "two", // U+0032 '2'
51: "three", // U+0033 '3'
52: "four", // U+0034 '4'
53: "five", // U+0035 '5'
54: "six", // U+0036 '6'
55: "seven", // U+0037 '7'
56: "eight", // U+0038 '8'
57: "nine", // U+0039 '9'
58: "colon", // U+003A ':'
59: "semicolon", // U+003B ';'
60: "less", // U+003C '<'
61: "equal", // U+003D '='
62: "greater", // U+003E '>'
63: "question", // U+003F '?'
64: "congruent", // U+0040 '@'
65: "Alpha", // U+0041 'A'
66: "Beta", // U+0042 'B'
67: "Chi", // U+0043 'C'
68: "Delta", // U+0044 'D'
69: "Epsilon", // U+0045 'E'
70: "Phi", // U+0046 'F'
71: "Gamma", // U+0047 'G'
72: "Eta", // U+0048 'H'
73: "Iota", // U+0049 'I'
74: "theta1", // U+004A 'J'
75: "Kappa", // U+004B 'K'
76: "Lambda", // U+004C 'L'
77: "Mu", // U+004D 'M'
78: "Nu", // U+004E 'N'
79: "Omicron", // U+004F 'O'
80: "Pi", // U+0050 'P'
81: "Theta", // U+0051 'Q'
82: "Rho", // U+0052 'R'
83: "Sigma", // U+0053 'S'
84: "Tau", // U+0054 'T'
85: "Upsilon", // U+0055 'U'
86: "sigma1", // U+0056 'V'
87: "Omega", // U+0057 'W'
88: "Xi", // U+0058 'X'
89: "Psi", // U+0059 'Y'
90: "Zeta", // U+005A 'Z'
91: "bracketleft", // U+005B '['
92: "therefore", // U+005C '\'
93: "bracketright", // U+005D ']'
94: "perpendicular", // U+005E '^'
95: "underscore", // U+005F '_'
96: "radicalex", // U+0060 '`'
97: "alpha", // U+0061 'a'
98: "beta", // U+0062 'b'
99: "chi", // U+0063 'c'
100: "delta", // U+0064 'd'
101: "epsilon", // U+0065 'e'
102: "phi", // U+0066 'f'
103: "gamma", // U+0067 'g'
104: "eta", // U+0068 'h'
105: "iota", // U+0069 'i'
106: "phi1", // U+006A 'j'
107: "kappa", // U+006B 'k'
108: "lambda", // U+006C 'l'
109: "mu", // U+006D 'm'
110: "nu", // U+006E 'n'
111: "omicron", // U+006F 'o'
112: "pi", // U+0070 'p'
113: "theta", // U+0071 'q'
114: "rho", // U+0072 'r'
115: "sigma", // U+0073 's'
116: "tau", // U+0074 't'
117: "upsilon", // U+0075 'u'
118: "omega1", // U+0076 'v'
119: "omega", // U+0077 'w'
120: "xi", // U+0078 'x'
121: "psi", // U+0079 'y'
122: "zeta", // U+007A 'z'
123: "braceleft", // U+007B '{'
124: "bar", // U+007C '|'
125: "braceright", // U+007D '}'
126: "similar", // U+007E '~'
160: "Euro", // U+00A0
161: "Upsilon1", // U+00A1 '¡'
162: "minute", // U+00A2 '¢'
163: "lessequal", // U+00A3 '£'
164: "fraction", // U+00A4 '¤'
165: "infinity", // U+00A5 '¥'
166: "florin", // U+00A6 '¦'
167: "club", // U+00A7 '§'
168: "diamond", // U+00A8 '¨'
169: "heart", // U+00A9 '©'
170: "spade", // U+00AA 'ª'
171: "arrowboth", // U+00AB '«'
172: "arrowleft", // U+00AC '¬'
173: "arrowup", // U+00AD
174: "arrowright", // U+00AE '®'
175: "arrowdown", // U+00AF '¯'
176: "degree", // U+00B0 '°'
177: "plusminus", // U+00B1 '±'
178: "second", // U+00B2 '²'
179: "greaterequal", // U+00B3 '³'
180: "multiply", // U+00B4 '´'
181: "proportional", // U+00B5 'µ'
182: "partialdiff", // U+00B6 '¶'
183: "bullet", // U+00B7 '·'
184: "divide", // U+00B8 '¸'
185: "notequal", // U+00B9 '¹'
186: "equivalence", // U+00BA 'º'
187: "approxequal", // U+00BB '»'
188: "ellipsis", // U+00BC '¼'
189: "arrowvertex", // U+00BD '½'
190: "arrowhorizex", // U+00BE '¾'
191: "carriagereturn", // U+00BF '¿'
192: "aleph", // U+00C0 'À'
193: "Ifraktur", // U+00C1 'Á'
194: "Rfraktur", // U+00C2 'Â'
195: "weierstrass", // U+00C3 'Ã'
196: "circlemultiply", // U+00C4 'Ä'
197: "circleplus", // U+00C5 'Å'
198: "emptyset", // U+00C6 'Æ'
199: "intersection", // U+00C7 'Ç'
200: "union", // U+00C8 'È'
201: "propersuperset", // U+00C9 'É'
202: "reflexsuperset", // U+00CA 'Ê'
203: "notsubset", // U+00CB 'Ë'
204: "propersubset", // U+00CC 'Ì'
205: "reflexsubset", // U+00CD 'Í'
206: "element", // U+00CE 'Î'
207: "notelement", // U+00CF 'Ï'
208: "angle", // U+00D0 'Ð'
209: "gradient", // U+00D1 'Ñ'
210: "registerserif", // U+00D2 'Ò'
211: "copyrightserif", // U+00D3 'Ó'
212: "trademarkserif", // U+00D4 'Ô'
213: "product", // U+00D5 'Õ'
214: "radical", // U+00D6 'Ö'
215: "dotmath", // U+00D7 '×'
216: "logicalnot", // U+00D8 'Ø'
217: "logicaland", // U+00D9 'Ù'
218: "logicalor", // U+00DA 'Ú'
219: "arrowdblboth", // U+00DB 'Û'
220: "arrowdblleft", // U+00DC 'Ü'
221: "arrowdblup", // U+00DD 'Ý'
222: "arrowdblright", // U+00DE 'Þ'
223: "arrowdbldown", // U+00DF 'ß'
224: "lozenge", // U+00E0 'à'
225: "angleleft", // U+00E1 'á'
226: "registersans", // U+00E2 'â'
227: "copyrightsans", // U+00E3 'ã'
228: "trademarksans", // U+00E4 'ä'
229: "summation", // U+00E5 'å'
230: "parenlefttp", // U+00E6 'æ'
231: "parenleftex", // U+00E7 'ç'
232: "parenleftbt", // U+00E8 'è'
233: "bracketlefttp", // U+00E9 'é'
234: "bracketleftex", // U+00EA 'ê'
235: "bracketleftbt", // U+00EB 'ë'
236: "bracelefttp", // U+00EC 'ì'
237: "braceleftmid", // U+00ED 'í'
238: "braceleftbt", // U+00EE 'î'
239: "braceex", // U+00EF 'ï'
241: "angleright", // U+00F1 'ñ'
242: "integral", // U+00F2 'ò'
243: "integraltp", // U+00F3 'ó'
244: "integralex", // U+00F4 'ô'
245: "integralbt", // U+00F5 'õ'
246: "parenrighttp", // U+00F6 'ö'
247: "parenrightex", // U+00F7 '÷'
248: "parenrightbt", // U+00F8 'ø'
249: "bracketrighttp", // U+00F9 'ù'
250: "bracketrightex", // U+00FA 'ú'
251: "bracketrightbt", // U+00FB 'û'
252: "bracerighttp", // U+00FC 'ü'
253: "bracerightmid", // U+00FD 'ý'
254: "bracerightbt", // U+00FE 'þ'
}
// ZapfDingbatsGlyphMap is a glyph lookup table for ZapfDingbats character codes.
// See Annex D.6 ZapfDingbats Set and Encoding
var ZapfDingbatsGlyphMap = map[int]string{
32: "space", // U+0020 ' '
33: "a1", // U+0021 '!'
34: "a2", // U+0022 '"'
35: "a202", // U+0023 '#'
36: "a3", // U+0024 '$'
37: "a4", // U+0025 '%'
38: "a5", // U+0026 '&'
39: "a119", // U+0027 '''
40: "a118", // U+0028 '('
41: "a117", // U+0029 ')'
42: "a11", // U+002A '*'
43: "a12", // U+002B '+'
44: "a13", // U+002C ','
45: "a14", // U+002D '-'
46: "a15", // U+002E '.'
47: "a16", // U+002F '/'
48: "a105", // U+0030 '0'
49: "a17", // U+0031 '1'
50: "a18", // U+0032 '2'
51: "a19", // U+0033 '3'
52: "a20", // U+0034 '4'
53: "a21", // U+0035 '5'
54: "a22", // U+0036 '6'
55: "a23", // U+0037 '7'
56: "a24", // U+0038 '8'
57: "a25", // U+0039 '9'
58: "a26", // U+003A ':'
59: "a27", // U+003B ';'
60: "a28", // U+003C '<'
61: "a6", // U+003D '='
62: "a7", // U+003E '>'
63: "a8", // U+003F '?'
64: "a9", // U+0040 '@'
65: "a10", // U+0041 'A'
66: "a29", // U+0042 'B'
67: "a30", // U+0043 'C'
68: "a31", // U+0044 'D'
69: "a32", // U+0045 'E'
70: "a33", // U+0046 'F'
71: "a34", // U+0047 'G'
72: "a35", // U+0048 'H'
73: "a36", // U+0049 'I'
74: "a37", // U+004A 'J'
75: "a38", // U+004B 'K'
76: "a39", // U+004C 'L'
77: "a40", // U+004D 'M'
78: "a41", // U+004E 'N'
79: "a42", // U+004F 'O'
80: "a43", // U+0050 'P'
81: "a44", // U+0051 'Q'
82: "a45", // U+0052 'R'
83: "a46", // U+0053 'S'
84: "a47", // U+0054 'T'
85: "a48", // U+0055 'U'
86: "a49", // U+0056 'V'
87: "a50", // U+0057 'W'
88: "a51", // U+0058 'X'
89: "a52", // U+0059 'Y'
90: "a53", // U+005A 'Z'
91: "a54", // U+005B '['
92: "a55", // U+005C '\'
93: "a56", // U+005D ']'
94: "a57", // U+005E '^'
95: "a58", // U+005F '_'
96: "a59", // U+0060 '`'
97: "a60", // U+0061 'a'
98: "a61", // U+0062 'b'
99: "a62", // U+0063 'c'
100: "a63", // U+0064 'd'
101: "a64", // U+0065 'e'
102: "a65", // U+0066 'f'
103: "a66", // U+0067 'g'
104: "a67", // U+0068 'h'
105: "a68", // U+0069 'i'
106: "a69", // U+006A 'j'
107: "a70", // U+006B 'k'
108: "a71", // U+006C 'l'
109: "a72", // U+006D 'm'
110: "a73", // U+006E 'n'
111: "a74", // U+006F 'o'
112: "a203", // U+0070 'p'
113: "a75", // U+0071 'q'
114: "a204", // U+0072 'r'
115: "a76", // U+0073 's'
116: "a77", // U+0074 't'
117: "a78", // U+0075 'u'
118: "a79", // U+0076 'v'
119: "a81", // U+0077 'w'
120: "a82", // U+0078 'x'
121: "a83", // U+0079 'y'
122: "a84", // U+007A 'z'
123: "a97", // U+007B '{'
124: "a98", // U+007C '|'
125: "a99", // U+007D '}'
126: "a100", // U+007E '~'
161: "a101", // U+00A1 '¡'
162: "a102", // U+00A2 '¢'
163: "a103", // U+00A3 '£'
164: "a104", // U+00A4 '¤'
165: "a106", // U+00A5 '¥'
166: "a107", // U+00A6 '¦'
167: "a108", // U+00A7 '§'
168: "a112", // U+00A8 '¨'
169: "a111", // U+00A9 '©'
170: "a110", // U+00AA 'ª'
171: "a109", // U+00AB '«'
172: "a120", // U+00AC '¬'
173: "a121", // U+00AD
174: "a122", // U+00AE '®'
175: "a123", // U+00AF '¯'
176: "a124", // U+00B0 '°'
177: "a125", // U+00B1 '±'
178: "a126", // U+00B2 '²'
179: "a127", // U+00B3 '³'
180: "a128", // U+00B4 '´'
181: "a129", // U+00B5 'µ'
182: "a130", // U+00B6 '¶'
183: "a131", // U+00B7 '·'
184: "a132", // U+00B8 '¸'
185: "a133", // U+00B9 '¹'
186: "a134", // U+00BA 'º'
187: "a135", // U+00BB '»'
188: "a136", // U+00BC '¼'
189: "a137", // U+00BD '½'
190: "a138", // U+00BE '¾'
191: "a139", // U+00BF '¿'
192: "a140", // U+00C0 'À'
193: "a141", // U+00C1 'Á'
194: "a142", // U+00C2 'Â'
195: "a143", // U+00C3 'Ã'
196: "a144", // U+00C4 'Ä'
197: "a145", // U+00C5 'Å'
198: "a146", // U+00C6 'Æ'
199: "a147", // U+00C7 'Ç'
200: "a148", // U+00C8 'È'
201: "a149", // U+00C9 'É'
202: "a150", // U+00CA 'Ê'
203: "a151", // U+00CB 'Ë'
204: "a152", // U+00CC 'Ì'
205: "a153", // U+00CD 'Í'
206: "a154", // U+00CE 'Î'
207: "a155", // U+00CF 'Ï'
208: "a156", // U+00D0 'Ð'
209: "a157", // U+00D1 'Ñ'
210: "a158", // U+00D2 'Ò'
211: "a159", // U+00D3 'Ó'
212: "a160", // U+00D4 'Ô'
213: "a161", // U+00D5 'Õ'
214: "a163", // U+00D6 'Ö'
215: "a164", // U+00D7 '×'
216: "a196", // U+00D8 'Ø'
217: "a165", // U+00D9 'Ù'
218: "a192", // U+00DA 'Ú'
219: "a166", // U+00DB 'Û'
220: "a167", // U+00DC 'Ü'
221: "a168", // U+00DD 'Ý'
222: "a169", // U+00DE 'Þ'
223: "a170", // U+00DF 'ß'
224: "a171", // U+00E0 'à'
225: "a172", // U+00E1 'á'
226: "a173", // U+00E2 'â'
227: "a162", // U+00E3 'ã'
228: "a174", // U+00E4 'ä'
229: "a175", // U+00E5 'å'
230: "a176", // U+00E6 'æ'
231: "a177", // U+00E7 'ç'
232: "a178", // U+00E8 'è'
233: "a179", // U+00E9 'é'
234: "a193", // U+00EA 'ê'
235: "a180", // U+00EB 'ë'
236: "a199", // U+00EC 'ì'
237: "a181", // U+00ED 'í'
238: "a200", // U+00EE 'î'
239: "a182", // U+00EF 'ï'
241: "a201", // U+00F1 'ñ'
242: "a183", // U+00F2 'ò'
243: "a184", // U+00F3 'ó'
244: "a197", // U+00F4 'ô'
245: "a185", // U+00F5 'õ'
246: "a194", // U+00F6 'ö'
247: "a198", // U+00F7 '÷'
248: "a186", // U+00F8 'ø'
249: "a195", // U+00F9 'ù'
250: "a187", // U+00FA 'ú'
251: "a188", // U+00FB 'û'
252: "a189", // U+00FC 'ü'
253: "a190", // U+00FD 'ý'
254: "a191", // U+00FE 'þ'
}
// fontMetrics bundles the metrics for one core font:
// its font bounding box and the width of each of its glyphs.
type fontMetrics struct {
	FBox *types.Rectangle // font box
	W    map[string]int   // glyph widths, keyed by PostScript glyph name
}
// CoreFontMetrics represents font metrics for the Adobe standard type 1 core fonts.
var CoreFontMetrics = map[string]fontMetrics{
"Courier-Bold": {
types.NewRectangle(-113.0, -250.0, 749.0, 801.0),
map[string]int{"space": 600, "exclam": 600, "quotedbl": 600, "numbersign": 600, "dollar": 600, "percent": 600, "ampersand": 600, "quoteright": 600, "parenleft": 600, "parenright": 600, "asterisk": 600, "plus": 600, "comma": 600, "hyphen": 600, "period": 600, "slash": 600, "zero": 600, "one": 600, "two": 600, "three": 600, "four": 600, "five": 600, "six": 600, "seven": 600, "eight": 600, "nine": 600, "colon": 600, "semicolon": 600, "less": 600, "equal": 600, "greater": 600, "question": 600, "at": 600, "A": 600, "B": 600, "C": 600, "D": 600, "E": 600, "F": 600, "G": 600, "H": 600, "I": 600, "J": 600, "K": 600, "L": 600, "M": 600, "N": 600, "O": 600, "P": 600, "Q": 600, "R": 600, "S": 600, "T": 600, "U": 600, "V": 600, "W": 600, "X": 600, "Y": 600, "Z": 600, "bracketleft": 600, "backslash": 600, "bracketright": 600, "asciicircum": 600, "underscore": 600, "quoteleft": 600, "a": 600, "b": 600, "c": 600, "d": 600, "e": 600, "f": 600, "g": 600, "h": 600, "i": 600, "j": 600, "k": 600, "l": 600, "m": 600, "n": 600, "o": 600, "p": 600, "q": 600, "r": 600, "s": 600, "t": 600, "u": 600, "v": 600, "w": 600, "x": 600, "y": 600, "z": 600, "braceleft": 600, "bar": 600, "braceright": 600, "asciitilde": 600, "exclamdown": 600, "cent": 600, "sterling": 600, "fraction": 600, "yen": 600, "florin": 600, "section": 600, "currency": 600, "quotesingle": 600, "quotedblleft": 600, "guillemotleft": 600, "guilsinglleft": 600, "guilsinglright": 600, "fi": 600, "fl": 600, "endash": 600, "dagger": 600, "daggerdbl": 600, "periodcentered": 600, "paragraph": 600, "bullet": 600, "quotesinglbase": 600, "quotedblbase": 600, "quotedblright": 600, "guillemotright": 600, "ellipsis": 600, "perthousand": 600, "questiondown": 600, "grave": 600, "acute": 600, "circumflex": 600, "tilde": 600, "macron": 600, "breve": 600, "dotaccent": 600, "dieresis": 600, "ring": 600, "cedilla": 600, "hungarumlaut": 600, "ogonek": 600, "caron": 600, "emdash": 600, "AE": 600, "ordfeminine": 600, "Lslash": 600, "Oslash": 600, 
"OE": 600, "ordmasculine": 600, "ae": 600, "dotlessi": 600, "lslash": 600, "oslash": 600, "oe": 600, "germandbls": 600, "Idieresis": 600, "eacute": 600, "abreve": 600, "uhungarumlaut": 600, "ecaron": 600, "Ydieresis": 600, "divide": 600, "Yacute": 600, "Acircumflex": 600, "aacute": 600, "Ucircumflex": 600, "yacute": 600, "scommaaccent": 600, "ecircumflex": 600, "Uring": 600, "Udieresis": 600, "aogonek": 600, "Uacute": 600, "uogonek": 600, "Edieresis": 600, "Dcroat": 600, "commaaccent": 600, "copyright": 600, "Emacron": 600, "ccaron": 600, "aring": 600, "Ncommaaccent": 600, "lacute": 600, "agrave": 600, "Tcommaaccent": 600, "Cacute": 600, "atilde": 600, "Edotaccent": 600, "scaron": 600, "scedilla": 600, "iacute": 600, "lozenge": 600, "Rcaron": 600, "Gcommaaccent": 600, "ucircumflex": 600, "acircumflex": 600, "Amacron": 600, "rcaron": 600, "ccedilla": 600, "Zdotaccent": 600, "Thorn": 600, "Omacron": 600, "Racute": 600, "Sacute": 600, "dcaron": 600, "Umacron": 600, "uring": 600, "threesuperior": 600, "Ograve": 600, "Agrave": 600, "Abreve": 600, "multiply": 600, "uacute": 600, "Tcaron": 600, "partialdiff": 600, "ydieresis": 600, "Nacute": 600, "icircumflex": 600, "Ecircumflex": 600, "adieresis": 600, "edieresis": 600, "cacute": 600, "nacute": 600, "umacron": 600, "Ncaron": 600, "Iacute": 600, "plusminus": 600, "brokenbar": 600, "registered": 600, "Gbreve": 600, "Idotaccent": 600, "summation": 600, "Egrave": 600, "racute": 600, "omacron": 600, "Zacute": 600, "Zcaron": 600, "greaterequal": 600, "Eth": 600, "Ccedilla": 600, "lcommaaccent": 600, "tcaron": 600, "eogonek": 600, "Uogonek": 600, "Aacute": 600, "Adieresis": 600, "egrave": 600, "zacute": 600, "iogonek": 600, "Oacute": 600, "oacute": 600, "amacron": 600, "sacute": 600, "idieresis": 600, "Ocircumflex": 600, "Ugrave": 600, "Delta": 600, "thorn": 600, "twosuperior": 600, "Odieresis": 600, "mu": 600, "igrave": 600, "ohungarumlaut": 600, "Eogonek": 600, "dcroat": 600, "threequarters": 600, "Scedilla": 600, "lcaron": 
600, "Kcommaaccent": 600, "Lacute": 600, "trademark": 600, "edotaccent": 600, "Igrave": 600, "Imacron": 600, "Lcaron": 600, "onehalf": 600, "lessequal": 600, "ocircumflex": 600, "ntilde": 600, "Uhungarumlaut": 600, "Eacute": 600, "emacron": 600, "gbreve": 600, "onequarter": 600, "Scaron": 600, "Scommaaccent": 600, "Ohungarumlaut": 600, "degree": 600, "ograve": 600, "Ccaron": 600, "ugrave": 600, "radical": 600, "Dcaron": 600, "rcommaaccent": 600, "Ntilde": 600, "otilde": 600, "Rcommaaccent": 600, "Lcommaaccent": 600, "Atilde": 600, "Aogonek": 600, "Aring": 600, "Otilde": 600, "zdotaccent": 600, "Ecaron": 600, "Iogonek": 600, "kcommaaccent": 600, "minus": 600, "Icircumflex": 600, "ncaron": 600, "tcommaaccent": 600, "logicalnot": 600, "odieresis": 600, "udieresis": 600, "notequal": 600, "gcommaaccent": 600, "eth": 600, "zcaron": 600, "ncommaaccent": 600, "onesuperior": 600, "imacron": 600, "Euro": 600},
},
"Courier-BoldOblique": {
types.NewRectangle(-57.0, -250.0, 869.0, 801.0),
map[string]int{"space": 600, "exclam": 600, "quotedbl": 600, "numbersign": 600, "dollar": 600, "percent": 600, "ampersand": 600, "quoteright": 600, "parenleft": 600, "parenright": 600, "asterisk": 600, "plus": 600, "comma": 600, "hyphen": 600, "period": 600, "slash": 600, "zero": 600, "one": 600, "two": 600, "three": 600, "four": 600, "five": 600, "six": 600, "seven": 600, "eight": 600, "nine": 600, "colon": 600, "semicolon": 600, "less": 600, "equal": 600, "greater": 600, "question": 600, "at": 600, "A": 600, "B": 600, "C": 600, "D": 600, "E": 600, "F": 600, "G": 600, "H": 600, "I": 600, "J": 600, "K": 600, "L": 600, "M": 600, "N": 600, "O": 600, "P": 600, "Q": 600, "R": 600, "S": 600, "T": 600, "U": 600, "V": 600, "W": 600, "X": 600, "Y": 600, "Z": 600, "bracketleft": 600, "backslash": 600, "bracketright": 600, "asciicircum": 600, "underscore": 600, "quoteleft": 600, "a": 600, "b": 600, "c": 600, "d": 600, "e": 600, "f": 600, "g": 600, "h": 600, "i": 600, "j": 600, "k": 600, "l": 600, "m": 600, "n": 600, "o": 600, "p": 600, "q": 600, "r": 600, "s": 600, "t": 600, "u": 600, "v": 600, "w": 600, "x": 600, "y": 600, "z": 600, "braceleft": 600, "bar": 600, "braceright": 600, "asciitilde": 600, "exclamdown": 600, "cent": 600, "sterling": 600, "fraction": 600, "yen": 600, "florin": 600, "section": 600, "currency": 600, "quotesingle": 600, "quotedblleft": 600, "guillemotleft": 600, "guilsinglleft": 600, "guilsinglright": 600, "fi": 600, "fl": 600, "endash": 600, "dagger": 600, "daggerdbl": 600, "periodcentered": 600, "paragraph": 600, "bullet": 600, "quotesinglbase": 600, "quotedblbase": 600, "quotedblright": 600, "guillemotright": 600, "ellipsis": 600, "perthousand": 600, "questiondown": 600, "grave": 600, "acute": 600, "circumflex": 600, "tilde": 600, "macron": 600, "breve": 600, "dotaccent": 600, "dieresis": 600, "ring": 600, "cedilla": 600, "hungarumlaut": 600, "ogonek": 600, "caron": 600, "emdash": 600, "AE": 600, "ordfeminine": 600, "Lslash": 600, "Oslash": 600, 
"OE": 600, "ordmasculine": 600, "ae": 600, "dotlessi": 600, "lslash": 600, "oslash": 600, "oe": 600, "germandbls": 600, "Idieresis": 600, "eacute": 600, "abreve": 600, "uhungarumlaut": 600, "ecaron": 600, "Ydieresis": 600, "divide": 600, "Yacute": 600, "Acircumflex": 600, "aacute": 600, "Ucircumflex": 600, "yacute": 600, "scommaaccent": 600, "ecircumflex": 600, "Uring": 600, "Udieresis": 600, "aogonek": 600, "Uacute": 600, "uogonek": 600, "Edieresis": 600, "Dcroat": 600, "commaaccent": 600, "copyright": 600, "Emacron": 600, "ccaron": 600, "aring": 600, "Ncommaaccent": 600, "lacute": 600, "agrave": 600, "Tcommaaccent": 600, "Cacute": 600, "atilde": 600, "Edotaccent": 600, "scaron": 600, "scedilla": 600, "iacute": 600, "lozenge": 600, "Rcaron": 600, "Gcommaaccent": 600, "ucircumflex": 600, "acircumflex": 600, "Amacron": 600, "rcaron": 600, "ccedilla": 600, "Zdotaccent": 600, "Thorn": 600, "Omacron": 600, "Racute": 600, "Sacute": 600, "dcaron": 600, "Umacron": 600, "uring": 600, "threesuperior": 600, "Ograve": 600, "Agrave": 600, "Abreve": 600, "multiply": 600, "uacute": 600, "Tcaron": 600, "partialdiff": 600, "ydieresis": 600, "Nacute": 600, "icircumflex": 600, "Ecircumflex": 600, "adieresis": 600, "edieresis": 600, "cacute": 600, "nacute": 600, "umacron": 600, "Ncaron": 600, "Iacute": 600, "plusminus": 600, "brokenbar": 600, "registered": 600, "Gbreve": 600, "Idotaccent": 600, "summation": 600, "Egrave": 600, "racute": 600, "omacron": 600, "Zacute": 600, "Zcaron": 600, "greaterequal": 600, "Eth": 600, "Ccedilla": 600, "lcommaaccent": 600, "tcaron": 600, "eogonek": 600, "Uogonek": 600, "Aacute": 600, "Adieresis": 600, "egrave": 600, "zacute": 600, "iogonek": 600, "Oacute": 600, "oacute": 600, "amacron": 600, "sacute": 600, "idieresis": 600, "Ocircumflex": 600, "Ugrave": 600, "Delta": 600, "thorn": 600, "twosuperior": 600, "Odieresis": 600, "mu": 600, "igrave": 600, "ohungarumlaut": 600, "Eogonek": 600, "dcroat": 600, "threequarters": 600, "Scedilla": 600, "lcaron": 
600, "Kcommaaccent": 600, "Lacute": 600, "trademark": 600, "edotaccent": 600, "Igrave": 600, "Imacron": 600, "Lcaron": 600, "onehalf": 600, "lessequal": 600, "ocircumflex": 600, "ntilde": 600, "Uhungarumlaut": 600, "Eacute": 600, "emacron": 600, "gbreve": 600, "onequarter": 600, "Scaron": 600, "Scommaaccent": 600, "Ohungarumlaut": 600, "degree": 600, "ograve": 600, "Ccaron": 600, "ugrave": 600, "radical": 600, "Dcaron": 600, "rcommaaccent": 600, "Ntilde": 600, "otilde": 600, "Rcommaaccent": 600, "Lcommaaccent": 600, "Atilde": 600, "Aogonek": 600, "Aring": 600, "Otilde": 600, "zdotaccent": 600, "Ecaron": 600, "Iogonek": 600, "kcommaaccent": 600, "minus": 600, "Icircumflex": 600, "ncaron": 600, "tcommaaccent": 600, "logicalnot": 600, "odieresis": 600, "udieresis": 600, "notequal": 600, "gcommaaccent": 600, "eth": 600, "zcaron": 600, "ncommaaccent": 600, "onesuperior": 600, "imacron": 600, "Euro": 600},
},
"Courier-Oblique": {
types.NewRectangle(-27.0, -250.0, 849.0, 805.0),
map[string]int{"space": 600, "exclam": 600, "quotedbl": 600, "numbersign": 600, "dollar": 600, "percent": 600, "ampersand": 600, "quoteright": 600, "parenleft": 600, "parenright": 600, "asterisk": 600, "plus": 600, "comma": 600, "hyphen": 600, "period": 600, "slash": 600, "zero": 600, "one": 600, "two": 600, "three": 600, "four": 600, "five": 600, "six": 600, "seven": 600, "eight": 600, "nine": 600, "colon": 600, "semicolon": 600, "less": 600, "equal": 600, "greater": 600, "question": 600, "at": 600, "A": 600, "B": 600, "C": 600, "D": 600, "E": 600, "F": 600, "G": 600, "H": 600, "I": 600, "J": 600, "K": 600, "L": 600, "M": 600, "N": 600, "O": 600, "P": 600, "Q": 600, "R": 600, "S": 600, "T": 600, "U": 600, "V": 600, "W": 600, "X": 600, "Y": 600, "Z": 600, "bracketleft": 600, "backslash": 600, "bracketright": 600, "asciicircum": 600, "underscore": 600, "quoteleft": 600, "a": 600, "b": 600, "c": 600, "d": 600, "e": 600, "f": 600, "g": 600, "h": 600, "i": 600, "j": 600, "k": 600, "l": 600, "m": 600, "n": 600, "o": 600, "p": 600, "q": 600, "r": 600, "s": 600, "t": 600, "u": 600, "v": 600, "w": 600, "x": 600, "y": 600, "z": 600, "braceleft": 600, "bar": 600, "braceright": 600, "asciitilde": 600, "exclamdown": 600, "cent": 600, "sterling": 600, "fraction": 600, "yen": 600, "florin": 600, "section": 600, "currency": 600, "quotesingle": 600, "quotedblleft": 600, "guillemotleft": 600, "guilsinglleft": 600, "guilsinglright": 600, "fi": 600, "fl": 600, "endash": 600, "dagger": 600, "daggerdbl": 600, "periodcentered": 600, "paragraph": 600, "bullet": 600, "quotesinglbase": 600, "quotedblbase": 600, "quotedblright": 600, "guillemotright": 600, "ellipsis": 600, "perthousand": 600, "questiondown": 600, "grave": 600, "acute": 600, "circumflex": 600, "tilde": 600, "macron": 600, "breve": 600, "dotaccent": 600, "dieresis": 600, "ring": 600, "cedilla": 600, "hungarumlaut": 600, "ogonek": 600, "caron": 600, "emdash": 600, "AE": 600, "ordfeminine": 600, "Lslash": 600, "Oslash": 600, 
"OE": 600, "ordmasculine": 600, "ae": 600, "dotlessi": 600, "lslash": 600, "oslash": 600, "oe": 600, "germandbls": 600, "Idieresis": 600, "eacute": 600, "abreve": 600, "uhungarumlaut": 600, "ecaron": 600, "Ydieresis": 600, "divide": 600, "Yacute": 600, "Acircumflex": 600, "aacute": 600, "Ucircumflex": 600, "yacute": 600, "scommaaccent": 600, "ecircumflex": 600, "Uring": 600, "Udieresis": 600, "aogonek": 600, "Uacute": 600, "uogonek": 600, "Edieresis": 600, "Dcroat": 600, "commaaccent": 600, "copyright": 600, "Emacron": 600, "ccaron": 600, "aring": 600, "Ncommaaccent": 600, "lacute": 600, "agrave": 600, "Tcommaaccent": 600, "Cacute": 600, "atilde": 600, "Edotaccent": 600, "scaron": 600, "scedilla": 600, "iacute": 600, "lozenge": 600, "Rcaron": 600, "Gcommaaccent": 600, "ucircumflex": 600, "acircumflex": 600, "Amacron": 600, "rcaron": 600, "ccedilla": 600, "Zdotaccent": 600, "Thorn": 600, "Omacron": 600, "Racute": 600, "Sacute": 600, "dcaron": 600, "Umacron": 600, "uring": 600, "threesuperior": 600, "Ograve": 600, "Agrave": 600, "Abreve": 600, "multiply": 600, "uacute": 600, "Tcaron": 600, "partialdiff": 600, "ydieresis": 600, "Nacute": 600, "icircumflex": 600, "Ecircumflex": 600, "adieresis": 600, "edieresis": 600, "cacute": 600, "nacute": 600, "umacron": 600, "Ncaron": 600, "Iacute": 600, "plusminus": 600, "brokenbar": 600, "registered": 600, "Gbreve": 600, "Idotaccent": 600, "summation": 600, "Egrave": 600, "racute": 600, "omacron": 600, "Zacute": 600, "Zcaron": 600, "greaterequal": 600, "Eth": 600, "Ccedilla": 600, "lcommaaccent": 600, "tcaron": 600, "eogonek": 600, "Uogonek": 600, "Aacute": 600, "Adieresis": 600, "egrave": 600, "zacute": 600, "iogonek": 600, "Oacute": 600, "oacute": 600, "amacron": 600, "sacute": 600, "idieresis": 600, "Ocircumflex": 600, "Ugrave": 600, "Delta": 600, "thorn": 600, "twosuperior": 600, "Odieresis": 600, "mu": 600, "igrave": 600, "ohungarumlaut": 600, "Eogonek": 600, "dcroat": 600, "threequarters": 600, "Scedilla": 600, "lcaron": 
600, "Kcommaaccent": 600, "Lacute": 600, "trademark": 600, "edotaccent": 600, "Igrave": 600, "Imacron": 600, "Lcaron": 600, "onehalf": 600, "lessequal": 600, "ocircumflex": 600, "ntilde": 600, "Uhungarumlaut": 600, "Eacute": 600, "emacron": 600, "gbreve": 600, "onequarter": 600, "Scaron": 600, "Scommaaccent": 600, "Ohungarumlaut": 600, "degree": 600, "ograve": 600, "Ccaron": 600, "ugrave": 600, "radical": 600, "Dcaron": 600, "rcommaaccent": 600, "Ntilde": 600, "otilde": 600, "Rcommaaccent": 600, "Lcommaaccent": 600, "Atilde": 600, "Aogonek": 600, "Aring": 600, "Otilde": 600, "zdotaccent": 600, "Ecaron": 600, "Iogonek": 600, "kcommaaccent": 600, "minus": 600, "Icircumflex": 600, "ncaron": 600, "tcommaaccent": 600, "logicalnot": 600, "odieresis": 600, "udieresis": 600, "notequal": 600, "gcommaaccent": 600, "eth": 600, "zcaron": 600, "ncommaaccent": 600, "onesuperior": 600, "imacron": 600, "Euro": 600},
},
"Courier": {
types.NewRectangle(-23.0, -250.0, 715.0, 805.0),
map[string]int{"space": 600, "exclam": 600, "quotedbl": 600, "numbersign": 600, "dollar": 600, "percent": 600, "ampersand": 600, "quoteright": 600, "parenleft": 600, "parenright": 600, "asterisk": 600, "plus": 600, "comma": 600, "hyphen": 600, "period": 600, "slash": 600, "zero": 600, "one": 600, "two": 600, "three": 600, "four": 600, "five": 600, "six": 600, "seven": 600, "eight": 600, "nine": 600, "colon": 600, "semicolon": 600, "less": 600, "equal": 600, "greater": 600, "question": 600, "at": 600, "A": 600, "B": 600, "C": 600, "D": 600, "E": 600, "F": 600, "G": 600, "H": 600, "I": 600, "J": 600, "K": 600, "L": 600, "M": 600, "N": 600, "O": 600, "P": 600, "Q": 600, "R": 600, "S": 600, "T": 600, "U": 600, "V": 600, "W": 600, "X": 600, "Y": 600, "Z": 600, "bracketleft": 600, "backslash": 600, "bracketright": 600, "asciicircum": 600, "underscore": 600, "quoteleft": 600, "a": 600, "b": 600, "c": 600, "d": 600, "e": 600, "f": 600, "g": 600, "h": 600, "i": 600, "j": 600, "k": 600, "l": 600, "m": 600, "n": 600, "o": 600, "p": 600, "q": 600, "r": 600, "s": 600, "t": 600, "u": 600, "v": 600, "w": 600, "x": 600, "y": 600, "z": 600, "braceleft": 600, "bar": 600, "braceright": 600, "asciitilde": 600, "exclamdown": 600, "cent": 600, "sterling": 600, "fraction": 600, "yen": 600, "florin": 600, "section": 600, "currency": 600, "quotesingle": 600, "quotedblleft": 600, "guillemotleft": 600, "guilsinglleft": 600, "guilsinglright": 600, "fi": 600, "fl": 600, "endash": 600, "dagger": 600, "daggerdbl": 600, "periodcentered": 600, "paragraph": 600, "bullet": 600, "quotesinglbase": 600, "quotedblbase": 600, "quotedblright": 600, "guillemotright": 600, "ellipsis": 600, "perthousand": 600, "questiondown": 600, "grave": 600, "acute": 600, "circumflex": 600, "tilde": 600, "macron": 600, "breve": 600, "dotaccent": 600, "dieresis": 600, "ring": 600, "cedilla": 600, "hungarumlaut": 600, "ogonek": 600, "caron": 600, "emdash": 600, "AE": 600, "ordfeminine": 600, "Lslash": 600, "Oslash": 600, 
"OE": 600, "ordmasculine": 600, "ae": 600, "dotlessi": 600, "lslash": 600, "oslash": 600, "oe": 600, "germandbls": 600, "Idieresis": 600, "eacute": 600, "abreve": 600, "uhungarumlaut": 600, "ecaron": 600, "Ydieresis": 600, "divide": 600, "Yacute": 600, "Acircumflex": 600, "aacute": 600, "Ucircumflex": 600, "yacute": 600, "scommaaccent": 600, "ecircumflex": 600, "Uring": 600, "Udieresis": 600, "aogonek": 600, "Uacute": 600, "uogonek": 600, "Edieresis": 600, "Dcroat": 600, "commaaccent": 600, "copyright": 600, "Emacron": 600, "ccaron": 600, "aring": 600, "Ncommaaccent": 600, "lacute": 600, "agrave": 600, "Tcommaaccent": 600, "Cacute": 600, "atilde": 600, "Edotaccent": 600, "scaron": 600, "scedilla": 600, "iacute": 600, "lozenge": 600, "Rcaron": 600, "Gcommaaccent": 600, "ucircumflex": 600, "acircumflex": 600, "Amacron": 600, "rcaron": 600, "ccedilla": 600, "Zdotaccent": 600, "Thorn": 600, "Omacron": 600, "Racute": 600, "Sacute": 600, "dcaron": 600, "Umacron": 600, "uring": 600, "threesuperior": 600, "Ograve": 600, "Agrave": 600, "Abreve": 600, "multiply": 600, "uacute": 600, "Tcaron": 600, "partialdiff": 600, "ydieresis": 600, "Nacute": 600, "icircumflex": 600, "Ecircumflex": 600, "adieresis": 600, "edieresis": 600, "cacute": 600, "nacute": 600, "umacron": 600, "Ncaron": 600, "Iacute": 600, "plusminus": 600, "brokenbar": 600, "registered": 600, "Gbreve": 600, "Idotaccent": 600, "summation": 600, "Egrave": 600, "racute": 600, "omacron": 600, "Zacute": 600, "Zcaron": 600, "greaterequal": 600, "Eth": 600, "Ccedilla": 600, "lcommaaccent": 600, "tcaron": 600, "eogonek": 600, "Uogonek": 600, "Aacute": 600, "Adieresis": 600, "egrave": 600, "zacute": 600, "iogonek": 600, "Oacute": 600, "oacute": 600, "amacron": 600, "sacute": 600, "idieresis": 600, "Ocircumflex": 600, "Ugrave": 600, "Delta": 600, "thorn": 600, "twosuperior": 600, "Odieresis": 600, "mu": 600, "igrave": 600, "ohungarumlaut": 600, "Eogonek": 600, "dcroat": 600, "threequarters": 600, "Scedilla": 600, "lcaron": 
600, "Kcommaaccent": 600, "Lacute": 600, "trademark": 600, "edotaccent": 600, "Igrave": 600, "Imacron": 600, "Lcaron": 600, "onehalf": 600, "lessequal": 600, "ocircumflex": 600, "ntilde": 600, "Uhungarumlaut": 600, "Eacute": 600, "emacron": 600, "gbreve": 600, "onequarter": 600, "Scaron": 600, "Scommaaccent": 600, "Ohungarumlaut": 600, "degree": 600, "ograve": 600, "Ccaron": 600, "ugrave": 600, "radical": 600, "Dcaron": 600, "rcommaaccent": 600, "Ntilde": 600, "otilde": 600, "Rcommaaccent": 600, "Lcommaaccent": 600, "Atilde": 600, "Aogonek": 600, "Aring": 600, "Otilde": 600, "zdotaccent": 600, "Ecaron": 600, "Iogonek": 600, "kcommaaccent": 600, "minus": 600, "Icircumflex": 600, "ncaron": 600, "tcommaaccent": 600, "logicalnot": 600, "odieresis": 600, "udieresis": 600, "notequal": 600, "gcommaaccent": 600, "eth": 600, "zcaron": 600, "ncommaaccent": 600, "onesuperior": 600, "imacron": 600, "Euro": 600},
},
"Helvetica-Bold": {
types.NewRectangle(-170.0, -228.0, 1003.0, 962.0),
map[string]int{"space": 278, "exclam": 333, "quotedbl": 474, "numbersign": 556, "dollar": 556, "percent": 889, "ampersand": 722, "quoteright": 278, "parenleft": 333, "parenright": 333, "asterisk": 389, "plus": 584, "comma": 278, "hyphen": 333, "period": 278, "slash": 278, "zero": 556, "one": 556, "two": 556, "three": 556, "four": 556, "five": 556, "six": 556, "seven": 556, "eight": 556, "nine": 556, "colon": 333, "semicolon": 333, "less": 584, "equal": 584, "greater": 584, "question": 611, "at": 975, "A": 722, "B": 722, "C": 722, "D": 722, "E": 667, "F": 611, "G": 778, "H": 722, "I": 278, "J": 556, "K": 722, "L": 611, "M": 833, "N": 722, "O": 778, "P": 667, "Q": 778, "R": 722, "S": 667, "T": 611, "U": 722, "V": 667, "W": 944, "X": 667, "Y": 667, "Z": 611, "bracketleft": 333, "backslash": 278, "bracketright": 333, "asciicircum": 584, "underscore": 556, "quoteleft": 278, "a": 556, "b": 611, "c": 556, "d": 611, "e": 556, "f": 333, "g": 611, "h": 611, "i": 278, "j": 278, "k": 556, "l": 278, "m": 889, "n": 611, "o": 611, "p": 611, "q": 611, "r": 389, "s": 556, "t": 333, "u": 611, "v": 556, "w": 778, "x": 556, "y": 556, "z": 500, "braceleft": 389, "bar": 280, "braceright": 389, "asciitilde": 584, "exclamdown": 333, "cent": 556, "sterling": 556, "fraction": 167, "yen": 556, "florin": 556, "section": 556, "currency": 556, "quotesingle": 238, "quotedblleft": 500, "guillemotleft": 556, "guilsinglleft": 333, "guilsinglright": 333, "fi": 611, "fl": 611, "endash": 556, "dagger": 556, "daggerdbl": 556, "periodcentered": 278, "paragraph": 556, "bullet": 350, "quotesinglbase": 278, "quotedblbase": 500, "quotedblright": 500, "guillemotright": 556, "ellipsis": 1000, "perthousand": 1000, "questiondown": 611, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 1000, "ordfeminine": 370, "Lslash": 611, "Oslash": 
778, "OE": 1000, "ordmasculine": 365, "ae": 889, "dotlessi": 278, "lslash": 278, "oslash": 611, "oe": 944, "germandbls": 611, "Idieresis": 278, "eacute": 556, "abreve": 556, "uhungarumlaut": 611, "ecaron": 556, "Ydieresis": 667, "divide": 584, "Yacute": 667, "Acircumflex": 722, "aacute": 556, "Ucircumflex": 722, "yacute": 556, "scommaaccent": 556, "ecircumflex": 556, "Uring": 722, "Udieresis": 722, "aogonek": 556, "Uacute": 722, "uogonek": 611, "Edieresis": 667, "Dcroat": 722, "commaaccent": 250, "copyright": 737, "Emacron": 667, "ccaron": 556, "aring": 556, "Ncommaaccent": 722, "lacute": 278, "agrave": 556, "Tcommaaccent": 611, "Cacute": 722, "atilde": 556, "Edotaccent": 667, "scaron": 556, "scedilla": 556, "iacute": 278, "lozenge": 494, "Rcaron": 722, "Gcommaaccent": 778, "ucircumflex": 611, "acircumflex": 556, "Amacron": 722, "rcaron": 389, "ccedilla": 556, "Zdotaccent": 611, "Thorn": 667, "Omacron": 778, "Racute": 722, "Sacute": 667, "dcaron": 743, "Umacron": 722, "uring": 611, "threesuperior": 333, "Ograve": 778, "Agrave": 722, "Abreve": 722, "multiply": 584, "uacute": 611, "Tcaron": 611, "partialdiff": 494, "ydieresis": 556, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 667, "adieresis": 556, "edieresis": 556, "cacute": 556, "nacute": 611, "umacron": 611, "Ncaron": 722, "Iacute": 278, "plusminus": 584, "brokenbar": 280, "registered": 737, "Gbreve": 778, "Idotaccent": 278, "summation": 600, "Egrave": 667, "racute": 389, "omacron": 611, "Zacute": 611, "Zcaron": 611, "greaterequal": 549, "Eth": 722, "Ccedilla": 722, "lcommaaccent": 278, "tcaron": 389, "eogonek": 556, "Uogonek": 722, "Aacute": 722, "Adieresis": 722, "egrave": 556, "zacute": 500, "iogonek": 278, "Oacute": 778, "oacute": 611, "amacron": 556, "sacute": 556, "idieresis": 278, "Ocircumflex": 778, "Ugrave": 722, "Delta": 612, "thorn": 611, "twosuperior": 333, "Odieresis": 778, "mu": 611, "igrave": 278, "ohungarumlaut": 611, "Eogonek": 667, "dcroat": 611, "threequarters": 834, "Scedilla": 667, 
"lcaron": 400, "Kcommaaccent": 722, "Lacute": 611, "trademark": 1000, "edotaccent": 556, "Igrave": 278, "Imacron": 278, "Lcaron": 611, "onehalf": 834, "lessequal": 549, "ocircumflex": 611, "ntilde": 611, "Uhungarumlaut": 722, "Eacute": 667, "emacron": 556, "gbreve": 611, "onequarter": 834, "Scaron": 667, "Scommaaccent": 667, "Ohungarumlaut": 778, "degree": 400, "ograve": 611, "Ccaron": 722, "ugrave": 611, "radical": 549, "Dcaron": 722, "rcommaaccent": 389, "Ntilde": 722, "otilde": 611, "Rcommaaccent": 722, "Lcommaaccent": 611, "Atilde": 722, "Aogonek": 722, "Aring": 722, "Otilde": 778, "zdotaccent": 500, "Ecaron": 667, "Iogonek": 278, "kcommaaccent": 556, "minus": 584, "Icircumflex": 278, "ncaron": 611, "tcommaaccent": 333, "logicalnot": 584, "odieresis": 611, "udieresis": 611, "notequal": 549, "gcommaaccent": 611, "eth": 611, "zcaron": 500, "ncommaaccent": 611, "onesuperior": 333, "imacron": 278, "Euro": 556},
},
"Helvetica-BoldOblique": {
types.NewRectangle(-174.0, -228.0, 1114.0, 962.0),
map[string]int{"space": 278, "exclam": 333, "quotedbl": 474, "numbersign": 556, "dollar": 556, "percent": 889, "ampersand": 722, "quoteright": 278, "parenleft": 333, "parenright": 333, "asterisk": 389, "plus": 584, "comma": 278, "hyphen": 333, "period": 278, "slash": 278, "zero": 556, "one": 556, "two": 556, "three": 556, "four": 556, "five": 556, "six": 556, "seven": 556, "eight": 556, "nine": 556, "colon": 333, "semicolon": 333, "less": 584, "equal": 584, "greater": 584, "question": 611, "at": 975, "A": 722, "B": 722, "C": 722, "D": 722, "E": 667, "F": 611, "G": 778, "H": 722, "I": 278, "J": 556, "K": 722, "L": 611, "M": 833, "N": 722, "O": 778, "P": 667, "Q": 778, "R": 722, "S": 667, "T": 611, "U": 722, "V": 667, "W": 944, "X": 667, "Y": 667, "Z": 611, "bracketleft": 333, "backslash": 278, "bracketright": 333, "asciicircum": 584, "underscore": 556, "quoteleft": 278, "a": 556, "b": 611, "c": 556, "d": 611, "e": 556, "f": 333, "g": 611, "h": 611, "i": 278, "j": 278, "k": 556, "l": 278, "m": 889, "n": 611, "o": 611, "p": 611, "q": 611, "r": 389, "s": 556, "t": 333, "u": 611, "v": 556, "w": 778, "x": 556, "y": 556, "z": 500, "braceleft": 389, "bar": 280, "braceright": 389, "asciitilde": 584, "exclamdown": 333, "cent": 556, "sterling": 556, "fraction": 167, "yen": 556, "florin": 556, "section": 556, "currency": 556, "quotesingle": 238, "quotedblleft": 500, "guillemotleft": 556, "guilsinglleft": 333, "guilsinglright": 333, "fi": 611, "fl": 611, "endash": 556, "dagger": 556, "daggerdbl": 556, "periodcentered": 278, "paragraph": 556, "bullet": 350, "quotesinglbase": 278, "quotedblbase": 500, "quotedblright": 500, "guillemotright": 556, "ellipsis": 1000, "perthousand": 1000, "questiondown": 611, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 1000, "ordfeminine": 370, "Lslash": 611, "Oslash": 
778, "OE": 1000, "ordmasculine": 365, "ae": 889, "dotlessi": 278, "lslash": 278, "oslash": 611, "oe": 944, "germandbls": 611, "Idieresis": 278, "eacute": 556, "abreve": 556, "uhungarumlaut": 611, "ecaron": 556, "Ydieresis": 667, "divide": 584, "Yacute": 667, "Acircumflex": 722, "aacute": 556, "Ucircumflex": 722, "yacute": 556, "scommaaccent": 556, "ecircumflex": 556, "Uring": 722, "Udieresis": 722, "aogonek": 556, "Uacute": 722, "uogonek": 611, "Edieresis": 667, "Dcroat": 722, "commaaccent": 250, "copyright": 737, "Emacron": 667, "ccaron": 556, "aring": 556, "Ncommaaccent": 722, "lacute": 278, "agrave": 556, "Tcommaaccent": 611, "Cacute": 722, "atilde": 556, "Edotaccent": 667, "scaron": 556, "scedilla": 556, "iacute": 278, "lozenge": 494, "Rcaron": 722, "Gcommaaccent": 778, "ucircumflex": 611, "acircumflex": 556, "Amacron": 722, "rcaron": 389, "ccedilla": 556, "Zdotaccent": 611, "Thorn": 667, "Omacron": 778, "Racute": 722, "Sacute": 667, "dcaron": 743, "Umacron": 722, "uring": 611, "threesuperior": 333, "Ograve": 778, "Agrave": 722, "Abreve": 722, "multiply": 584, "uacute": 611, "Tcaron": 611, "partialdiff": 494, "ydieresis": 556, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 667, "adieresis": 556, "edieresis": 556, "cacute": 556, "nacute": 611, "umacron": 611, "Ncaron": 722, "Iacute": 278, "plusminus": 584, "brokenbar": 280, "registered": 737, "Gbreve": 778, "Idotaccent": 278, "summation": 600, "Egrave": 667, "racute": 389, "omacron": 611, "Zacute": 611, "Zcaron": 611, "greaterequal": 549, "Eth": 722, "Ccedilla": 722, "lcommaaccent": 278, "tcaron": 389, "eogonek": 556, "Uogonek": 722, "Aacute": 722, "Adieresis": 722, "egrave": 556, "zacute": 500, "iogonek": 278, "Oacute": 778, "oacute": 611, "amacron": 556, "sacute": 556, "idieresis": 278, "Ocircumflex": 778, "Ugrave": 722, "Delta": 612, "thorn": 611, "twosuperior": 333, "Odieresis": 778, "mu": 611, "igrave": 278, "ohungarumlaut": 611, "Eogonek": 667, "dcroat": 611, "threequarters": 834, "Scedilla": 667, 
"lcaron": 400, "Kcommaaccent": 722, "Lacute": 611, "trademark": 1000, "edotaccent": 556, "Igrave": 278, "Imacron": 278, "Lcaron": 611, "onehalf": 834, "lessequal": 549, "ocircumflex": 611, "ntilde": 611, "Uhungarumlaut": 722, "Eacute": 667, "emacron": 556, "gbreve": 611, "onequarter": 834, "Scaron": 667, "Scommaaccent": 667, "Ohungarumlaut": 778, "degree": 400, "ograve": 611, "Ccaron": 722, "ugrave": 611, "radical": 549, "Dcaron": 722, "rcommaaccent": 389, "Ntilde": 722, "otilde": 611, "Rcommaaccent": 722, "Lcommaaccent": 611, "Atilde": 722, "Aogonek": 722, "Aring": 722, "Otilde": 778, "zdotaccent": 500, "Ecaron": 667, "Iogonek": 278, "kcommaaccent": 556, "minus": 584, "Icircumflex": 278, "ncaron": 611, "tcommaaccent": 333, "logicalnot": 584, "odieresis": 611, "udieresis": 611, "notequal": 549, "gcommaaccent": 611, "eth": 611, "zcaron": 500, "ncommaaccent": 611, "onesuperior": 333, "imacron": 278, "Euro": 556},
},
"Helvetica-Oblique": {
types.NewRectangle(-170.0, -225.0, 1116.0, 931.0),
map[string]int{"space": 278, "exclam": 278, "quotedbl": 355, "numbersign": 556, "dollar": 556, "percent": 889, "ampersand": 667, "quoteright": 222, "parenleft": 333, "parenright": 333, "asterisk": 389, "plus": 584, "comma": 278, "hyphen": 333, "period": 278, "slash": 278, "zero": 556, "one": 556, "two": 556, "three": 556, "four": 556, "five": 556, "six": 556, "seven": 556, "eight": 556, "nine": 556, "colon": 278, "semicolon": 278, "less": 584, "equal": 584, "greater": 584, "question": 556, "at": 1015, "A": 667, "B": 667, "C": 722, "D": 722, "E": 667, "F": 611, "G": 778, "H": 722, "I": 278, "J": 500, "K": 667, "L": 556, "M": 833, "N": 722, "O": 778, "P": 667, "Q": 778, "R": 722, "S": 667, "T": 611, "U": 722, "V": 667, "W": 944, "X": 667, "Y": 667, "Z": 611, "bracketleft": 278, "backslash": 278, "bracketright": 278, "asciicircum": 469, "underscore": 556, "quoteleft": 222, "a": 556, "b": 556, "c": 500, "d": 556, "e": 556, "f": 278, "g": 556, "h": 556, "i": 222, "j": 222, "k": 500, "l": 222, "m": 833, "n": 556, "o": 556, "p": 556, "q": 556, "r": 333, "s": 500, "t": 278, "u": 556, "v": 500, "w": 722, "x": 500, "y": 500, "z": 500, "braceleft": 334, "bar": 260, "braceright": 334, "asciitilde": 584, "exclamdown": 333, "cent": 556, "sterling": 556, "fraction": 167, "yen": 556, "florin": 556, "section": 556, "currency": 556, "quotesingle": 191, "quotedblleft": 333, "guillemotleft": 556, "guilsinglleft": 333, "guilsinglright": 333, "fi": 500, "fl": 500, "endash": 556, "dagger": 556, "daggerdbl": 556, "periodcentered": 278, "paragraph": 537, "bullet": 350, "quotesinglbase": 222, "quotedblbase": 333, "quotedblright": 333, "guillemotright": 556, "ellipsis": 1000, "perthousand": 1000, "questiondown": 611, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 1000, "ordfeminine": 370, "Lslash": 556, "Oslash": 
778, "OE": 1000, "ordmasculine": 365, "ae": 889, "dotlessi": 278, "lslash": 222, "oslash": 611, "oe": 944, "germandbls": 611, "Idieresis": 278, "eacute": 556, "abreve": 556, "uhungarumlaut": 556, "ecaron": 556, "Ydieresis": 667, "divide": 584, "Yacute": 667, "Acircumflex": 667, "aacute": 556, "Ucircumflex": 722, "yacute": 500, "scommaaccent": 500, "ecircumflex": 556, "Uring": 722, "Udieresis": 722, "aogonek": 556, "Uacute": 722, "uogonek": 556, "Edieresis": 667, "Dcroat": 722, "commaaccent": 250, "copyright": 737, "Emacron": 667, "ccaron": 500, "aring": 556, "Ncommaaccent": 722, "lacute": 222, "agrave": 556, "Tcommaaccent": 611, "Cacute": 722, "atilde": 556, "Edotaccent": 667, "scaron": 500, "scedilla": 500, "iacute": 278, "lozenge": 471, "Rcaron": 722, "Gcommaaccent": 778, "ucircumflex": 556, "acircumflex": 556, "Amacron": 667, "rcaron": 333, "ccedilla": 500, "Zdotaccent": 611, "Thorn": 667, "Omacron": 778, "Racute": 722, "Sacute": 667, "dcaron": 643, "Umacron": 722, "uring": 556, "threesuperior": 333, "Ograve": 778, "Agrave": 667, "Abreve": 667, "multiply": 584, "uacute": 556, "Tcaron": 611, "partialdiff": 476, "ydieresis": 500, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 667, "adieresis": 556, "edieresis": 556, "cacute": 500, "nacute": 556, "umacron": 556, "Ncaron": 722, "Iacute": 278, "plusminus": 584, "brokenbar": 260, "registered": 737, "Gbreve": 778, "Idotaccent": 278, "summation": 600, "Egrave": 667, "racute": 333, "omacron": 556, "Zacute": 611, "Zcaron": 611, "greaterequal": 549, "Eth": 722, "Ccedilla": 722, "lcommaaccent": 222, "tcaron": 317, "eogonek": 556, "Uogonek": 722, "Aacute": 667, "Adieresis": 667, "egrave": 556, "zacute": 500, "iogonek": 222, "Oacute": 778, "oacute": 556, "amacron": 556, "sacute": 500, "idieresis": 278, "Ocircumflex": 778, "Ugrave": 722, "Delta": 612, "thorn": 556, "twosuperior": 333, "Odieresis": 778, "mu": 556, "igrave": 278, "ohungarumlaut": 556, "Eogonek": 667, "dcroat": 556, "threequarters": 834, "Scedilla": 667, 
"lcaron": 299, "Kcommaaccent": 667, "Lacute": 556, "trademark": 1000, "edotaccent": 556, "Igrave": 278, "Imacron": 278, "Lcaron": 556, "onehalf": 834, "lessequal": 549, "ocircumflex": 556, "ntilde": 556, "Uhungarumlaut": 722, "Eacute": 667, "emacron": 556, "gbreve": 556, "onequarter": 834, "Scaron": 667, "Scommaaccent": 667, "Ohungarumlaut": 778, "degree": 400, "ograve": 556, "Ccaron": 722, "ugrave": 556, "radical": 453, "Dcaron": 722, "rcommaaccent": 333, "Ntilde": 722, "otilde": 556, "Rcommaaccent": 722, "Lcommaaccent": 556, "Atilde": 667, "Aogonek": 667, "Aring": 667, "Otilde": 778, "zdotaccent": 500, "Ecaron": 667, "Iogonek": 278, "kcommaaccent": 500, "minus": 584, "Icircumflex": 278, "ncaron": 556, "tcommaaccent": 278, "logicalnot": 584, "odieresis": 556, "udieresis": 556, "notequal": 549, "gcommaaccent": 556, "eth": 556, "zcaron": 500, "ncommaaccent": 556, "onesuperior": 333, "imacron": 278, "Euro": 556},
},
"Helvetica": {
types.NewRectangle(-166.0, -225.0, 1000.0, 931.0),
map[string]int{"space": 278, "exclam": 278, "quotedbl": 355, "numbersign": 556, "dollar": 556, "percent": 889, "ampersand": 667, "quoteright": 222, "parenleft": 333, "parenright": 333, "asterisk": 389, "plus": 584, "comma": 278, "hyphen": 333, "period": 278, "slash": 278, "zero": 556, "one": 556, "two": 556, "three": 556, "four": 556, "five": 556, "six": 556, "seven": 556, "eight": 556, "nine": 556, "colon": 278, "semicolon": 278, "less": 584, "equal": 584, "greater": 584, "question": 556, "at": 1015, "A": 667, "B": 667, "C": 722, "D": 722, "E": 667, "F": 611, "G": 778, "H": 722, "I": 278, "J": 500, "K": 667, "L": 556, "M": 833, "N": 722, "O": 778, "P": 667, "Q": 778, "R": 722, "S": 667, "T": 611, "U": 722, "V": 667, "W": 944, "X": 667, "Y": 667, "Z": 611, "bracketleft": 278, "backslash": 278, "bracketright": 278, "asciicircum": 469, "underscore": 556, "quoteleft": 222, "a": 556, "b": 556, "c": 500, "d": 556, "e": 556, "f": 278, "g": 556, "h": 556, "i": 222, "j": 222, "k": 500, "l": 222, "m": 833, "n": 556, "o": 556, "p": 556, "q": 556, "r": 333, "s": 500, "t": 278, "u": 556, "v": 500, "w": 722, "x": 500, "y": 500, "z": 500, "braceleft": 334, "bar": 260, "braceright": 334, "asciitilde": 584, "exclamdown": 333, "cent": 556, "sterling": 556, "fraction": 167, "yen": 556, "florin": 556, "section": 556, "currency": 556, "quotesingle": 191, "quotedblleft": 333, "guillemotleft": 556, "guilsinglleft": 333, "guilsinglright": 333, "fi": 500, "fl": 500, "endash": 556, "dagger": 556, "daggerdbl": 556, "periodcentered": 278, "paragraph": 537, "bullet": 350, "quotesinglbase": 222, "quotedblbase": 333, "quotedblright": 333, "guillemotright": 556, "ellipsis": 1000, "perthousand": 1000, "questiondown": 611, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 1000, "ordfeminine": 370, "Lslash": 556, "Oslash": 
778, "OE": 1000, "ordmasculine": 365, "ae": 889, "dotlessi": 278, "lslash": 222, "oslash": 611, "oe": 944, "germandbls": 611, "Idieresis": 278, "eacute": 556, "abreve": 556, "uhungarumlaut": 556, "ecaron": 556, "Ydieresis": 667, "divide": 584, "Yacute": 667, "Acircumflex": 667, "aacute": 556, "Ucircumflex": 722, "yacute": 500, "scommaaccent": 500, "ecircumflex": 556, "Uring": 722, "Udieresis": 722, "aogonek": 556, "Uacute": 722, "uogonek": 556, "Edieresis": 667, "Dcroat": 722, "commaaccent": 250, "copyright": 737, "Emacron": 667, "ccaron": 500, "aring": 556, "Ncommaaccent": 722, "lacute": 222, "agrave": 556, "Tcommaaccent": 611, "Cacute": 722, "atilde": 556, "Edotaccent": 667, "scaron": 500, "scedilla": 500, "iacute": 278, "lozenge": 471, "Rcaron": 722, "Gcommaaccent": 778, "ucircumflex": 556, "acircumflex": 556, "Amacron": 667, "rcaron": 333, "ccedilla": 500, "Zdotaccent": 611, "Thorn": 667, "Omacron": 778, "Racute": 722, "Sacute": 667, "dcaron": 643, "Umacron": 722, "uring": 556, "threesuperior": 333, "Ograve": 778, "Agrave": 667, "Abreve": 667, "multiply": 584, "uacute": 556, "Tcaron": 611, "partialdiff": 476, "ydieresis": 500, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 667, "adieresis": 556, "edieresis": 556, "cacute": 500, "nacute": 556, "umacron": 556, "Ncaron": 722, "Iacute": 278, "plusminus": 584, "brokenbar": 260, "registered": 737, "Gbreve": 778, "Idotaccent": 278, "summation": 600, "Egrave": 667, "racute": 333, "omacron": 556, "Zacute": 611, "Zcaron": 611, "greaterequal": 549, "Eth": 722, "Ccedilla": 722, "lcommaaccent": 222, "tcaron": 317, "eogonek": 556, "Uogonek": 722, "Aacute": 667, "Adieresis": 667, "egrave": 556, "zacute": 500, "iogonek": 222, "Oacute": 778, "oacute": 556, "amacron": 556, "sacute": 500, "idieresis": 278, "Ocircumflex": 778, "Ugrave": 722, "Delta": 612, "thorn": 556, "twosuperior": 333, "Odieresis": 778, "mu": 556, "igrave": 278, "ohungarumlaut": 556, "Eogonek": 667, "dcroat": 556, "threequarters": 834, "Scedilla": 667, 
"lcaron": 299, "Kcommaaccent": 667, "Lacute": 556, "trademark": 1000, "edotaccent": 556, "Igrave": 278, "Imacron": 278, "Lcaron": 556, "onehalf": 834, "lessequal": 549, "ocircumflex": 556, "ntilde": 556, "Uhungarumlaut": 722, "Eacute": 667, "emacron": 556, "gbreve": 556, "onequarter": 834, "Scaron": 667, "Scommaaccent": 667, "Ohungarumlaut": 778, "degree": 400, "ograve": 556, "Ccaron": 722, "ugrave": 556, "radical": 453, "Dcaron": 722, "rcommaaccent": 333, "Ntilde": 722, "otilde": 556, "Rcommaaccent": 722, "Lcommaaccent": 556, "Atilde": 667, "Aogonek": 667, "Aring": 667, "Otilde": 778, "zdotaccent": 500, "Ecaron": 667, "Iogonek": 278, "kcommaaccent": 500, "minus": 584, "Icircumflex": 278, "ncaron": 556, "tcommaaccent": 278, "logicalnot": 584, "odieresis": 556, "udieresis": 556, "notequal": 549, "gcommaaccent": 556, "eth": 556, "zcaron": 500, "ncommaaccent": 556, "onesuperior": 333, "imacron": 278, "Euro": 556},
},
"Symbol": {
types.NewRectangle(-180.0, -293.0, 1090.0, 1010.0),
map[string]int{"space": 250, "exclam": 333, "universal": 713, "numbersign": 500, "existential": 549, "percent": 833, "ampersand": 778, "suchthat": 439, "parenleft": 333, "parenright": 333, "asteriskmath": 500, "plus": 549, "comma": 250, "minus": 549, "period": 250, "slash": 278, "zero": 500, "one": 500, "two": 500, "three": 500, "four": 500, "five": 500, "six": 500, "seven": 500, "eight": 500, "nine": 500, "colon": 278, "semicolon": 278, "less": 549, "equal": 549, "greater": 549, "question": 444, "congruent": 549, "Alpha": 722, "Beta": 667, "Chi": 722, "Delta": 612, "Epsilon": 611, "Phi": 763, "Gamma": 603, "Eta": 722, "Iota": 333, "theta1": 631, "Kappa": 722, "Lambda": 686, "Mu": 889, "Nu": 722, "Omicron": 722, "Pi": 768, "Theta": 741, "Rho": 556, "Sigma": 592, "Tau": 611, "Upsilon": 690, "sigma1": 439, "Omega": 768, "Xi": 645, "Psi": 795, "Zeta": 611, "bracketleft": 333, "therefore": 863, "bracketright": 333, "perpendicular": 658, "underscore": 500, "radicalex": 500, "alpha": 631, "beta": 549, "chi": 549, "delta": 494, "epsilon": 439, "phi": 521, "gamma": 411, "eta": 603, "iota": 329, "phi1": 603, "kappa": 549, "lambda": 549, "mu": 576, "nu": 521, "omicron": 549, "pi": 549, "theta": 521, "rho": 549, "sigma": 603, "tau": 439, "upsilon": 576, "omega1": 713, "omega": 686, "xi": 493, "psi": 686, "zeta": 494, "braceleft": 480, "bar": 200, "braceright": 480, "similar": 549, "Euro": 750, "Upsilon1": 620, "minute": 247, "lessequal": 549, "fraction": 167, "infinity": 713, "florin": 500, "club": 753, "diamond": 753, "heart": 753, "spade": 753, "arrowboth": 1042, "arrowleft": 987, "arrowup": 603, "arrowright": 987, "arrowdown": 603, "degree": 400, "plusminus": 549, "second": 411, "greaterequal": 549, "multiply": 549, "proportional": 713, "partialdiff": 494, "bullet": 460, "divide": 549, "notequal": 549, "equivalence": 549, "approxequal": 549, "ellipsis": 1000, "arrowvertex": 603, "arrowhorizex": 1000, "carriagereturn": 658, "aleph": 823, "Ifraktur": 686, "Rfraktur": 795, 
"weierstrass": 987, "circlemultiply": 768, "circleplus": 768, "emptyset": 823, "intersection": 768, "union": 768, "propersuperset": 713, "reflexsuperset": 713, "notsubset": 713, "propersubset": 713, "reflexsubset": 713, "element": 713, "notelement": 713, "angle": 768, "gradient": 713, "registerserif": 790, "copyrightserif": 790, "trademarkserif": 890, "product": 823, "radical": 549, "dotmath": 250, "logicalnot": 713, "logicaland": 603, "logicalor": 603, "arrowdblboth": 1042, "arrowdblleft": 987, "arrowdblup": 603, "arrowdblright": 987, "arrowdbldown": 603, "lozenge": 494, "angleleft": 329, "registersans": 790, "copyrightsans": 790, "trademarksans": 786, "summation": 713, "parenlefttp": 384, "parenleftex": 384, "parenleftbt": 384, "bracketlefttp": 384, "bracketleftex": 384, "bracketleftbt": 384, "bracelefttp": 494, "braceleftmid": 494, "braceleftbt": 494, "braceex": 494, "angleright": 329, "integral": 274, "integraltp": 686, "integralex": 686, "integralbt": 686, "parenrighttp": 384, "parenrightex": 384, "parenrightbt": 384, "bracketrighttp": 384, "bracketrightex": 384, "bracketrightbt": 384, "bracerighttp": 494, "bracerightmid": 494, "bracerightbt": 494, "apple": 790},
},
"Times-Bold": {
types.NewRectangle(-168.0, -218.0, 1000.0, 935.0),
map[string]int{"space": 250, "exclam": 333, "quotedbl": 555, "numbersign": 500, "dollar": 500, "percent": 1000, "ampersand": 833, "quoteright": 333, "parenleft": 333, "parenright": 333, "asterisk": 500, "plus": 570, "comma": 250, "hyphen": 333, "period": 250, "slash": 278, "zero": 500, "one": 500, "two": 500, "three": 500, "four": 500, "five": 500, "six": 500, "seven": 500, "eight": 500, "nine": 500, "colon": 333, "semicolon": 333, "less": 570, "equal": 570, "greater": 570, "question": 500, "at": 930, "A": 722, "B": 667, "C": 722, "D": 722, "E": 667, "F": 611, "G": 778, "H": 778, "I": 389, "J": 500, "K": 778, "L": 667, "M": 944, "N": 722, "O": 778, "P": 611, "Q": 778, "R": 722, "S": 556, "T": 667, "U": 722, "V": 722, "W": 1000, "X": 722, "Y": 722, "Z": 667, "bracketleft": 333, "backslash": 278, "bracketright": 333, "asciicircum": 581, "underscore": 500, "quoteleft": 333, "a": 500, "b": 556, "c": 444, "d": 556, "e": 444, "f": 333, "g": 500, "h": 556, "i": 278, "j": 333, "k": 556, "l": 278, "m": 833, "n": 556, "o": 500, "p": 556, "q": 556, "r": 444, "s": 389, "t": 333, "u": 556, "v": 500, "w": 722, "x": 500, "y": 500, "z": 444, "braceleft": 394, "bar": 220, "braceright": 394, "asciitilde": 520, "exclamdown": 333, "cent": 500, "sterling": 500, "fraction": 167, "yen": 500, "florin": 500, "section": 500, "currency": 500, "quotesingle": 278, "quotedblleft": 500, "guillemotleft": 500, "guilsinglleft": 333, "guilsinglright": 333, "fi": 556, "fl": 556, "endash": 500, "dagger": 500, "daggerdbl": 500, "periodcentered": 250, "paragraph": 540, "bullet": 350, "quotesinglbase": 333, "quotedblbase": 500, "quotedblright": 500, "guillemotright": 500, "ellipsis": 1000, "perthousand": 1000, "questiondown": 500, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 1000, "ordfeminine": 300, "Lslash": 667, "Oslash": 
778, "OE": 1000, "ordmasculine": 330, "ae": 722, "dotlessi": 278, "lslash": 278, "oslash": 500, "oe": 722, "germandbls": 556, "Idieresis": 389, "eacute": 444, "abreve": 500, "uhungarumlaut": 556, "ecaron": 444, "Ydieresis": 722, "divide": 570, "Yacute": 722, "Acircumflex": 722, "aacute": 500, "Ucircumflex": 722, "yacute": 500, "scommaaccent": 389, "ecircumflex": 444, "Uring": 722, "Udieresis": 722, "aogonek": 500, "Uacute": 722, "uogonek": 556, "Edieresis": 667, "Dcroat": 722, "commaaccent": 250, "copyright": 747, "Emacron": 667, "ccaron": 444, "aring": 500, "Ncommaaccent": 722, "lacute": 278, "agrave": 500, "Tcommaaccent": 667, "Cacute": 722, "atilde": 500, "Edotaccent": 667, "scaron": 389, "scedilla": 389, "iacute": 278, "lozenge": 494, "Rcaron": 722, "Gcommaaccent": 778, "ucircumflex": 556, "acircumflex": 500, "Amacron": 722, "rcaron": 444, "ccedilla": 444, "Zdotaccent": 667, "Thorn": 611, "Omacron": 778, "Racute": 722, "Sacute": 556, "dcaron": 672, "Umacron": 722, "uring": 556, "threesuperior": 300, "Ograve": 778, "Agrave": 722, "Abreve": 722, "multiply": 570, "uacute": 556, "Tcaron": 667, "partialdiff": 494, "ydieresis": 500, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 667, "adieresis": 500, "edieresis": 444, "cacute": 444, "nacute": 556, "umacron": 556, "Ncaron": 722, "Iacute": 389, "plusminus": 570, "brokenbar": 220, "registered": 747, "Gbreve": 778, "Idotaccent": 389, "summation": 600, "Egrave": 667, "racute": 444, "omacron": 500, "Zacute": 667, "Zcaron": 667, "greaterequal": 549, "Eth": 722, "Ccedilla": 722, "lcommaaccent": 278, "tcaron": 416, "eogonek": 444, "Uogonek": 722, "Aacute": 722, "Adieresis": 722, "egrave": 444, "zacute": 444, "iogonek": 278, "Oacute": 778, "oacute": 500, "amacron": 500, "sacute": 389, "idieresis": 278, "Ocircumflex": 778, "Ugrave": 722, "Delta": 612, "thorn": 556, "twosuperior": 300, "Odieresis": 778, "mu": 556, "igrave": 278, "ohungarumlaut": 500, "Eogonek": 667, "dcroat": 556, "threequarters": 750, "Scedilla": 556, 
"lcaron": 394, "Kcommaaccent": 778, "Lacute": 667, "trademark": 1000, "edotaccent": 444, "Igrave": 389, "Imacron": 389, "Lcaron": 667, "onehalf": 750, "lessequal": 549, "ocircumflex": 500, "ntilde": 556, "Uhungarumlaut": 722, "Eacute": 667, "emacron": 444, "gbreve": 500, "onequarter": 750, "Scaron": 556, "Scommaaccent": 556, "Ohungarumlaut": 778, "degree": 400, "ograve": 500, "Ccaron": 722, "ugrave": 556, "radical": 549, "Dcaron": 722, "rcommaaccent": 444, "Ntilde": 722, "otilde": 500, "Rcommaaccent": 722, "Lcommaaccent": 667, "Atilde": 722, "Aogonek": 722, "Aring": 722, "Otilde": 778, "zdotaccent": 444, "Ecaron": 667, "Iogonek": 389, "kcommaaccent": 556, "minus": 570, "Icircumflex": 389, "ncaron": 556, "tcommaaccent": 333, "logicalnot": 570, "odieresis": 500, "udieresis": 556, "notequal": 549, "gcommaaccent": 500, "eth": 500, "zcaron": 444, "ncommaaccent": 556, "onesuperior": 300, "imacron": 278, "Euro": 500},
},
"Times-BoldItalic": {
types.NewRectangle(-200.0, -218.0, 996.0, 921.0),
map[string]int{"space": 250, "exclam": 389, "quotedbl": 555, "numbersign": 500, "dollar": 500, "percent": 833, "ampersand": 778, "quoteright": 333, "parenleft": 333, "parenright": 333, "asterisk": 500, "plus": 570, "comma": 250, "hyphen": 333, "period": 250, "slash": 278, "zero": 500, "one": 500, "two": 500, "three": 500, "four": 500, "five": 500, "six": 500, "seven": 500, "eight": 500, "nine": 500, "colon": 333, "semicolon": 333, "less": 570, "equal": 570, "greater": 570, "question": 500, "at": 832, "A": 667, "B": 667, "C": 667, "D": 722, "E": 667, "F": 667, "G": 722, "H": 778, "I": 389, "J": 500, "K": 667, "L": 611, "M": 889, "N": 722, "O": 722, "P": 611, "Q": 722, "R": 667, "S": 556, "T": 611, "U": 722, "V": 667, "W": 889, "X": 667, "Y": 611, "Z": 611, "bracketleft": 333, "backslash": 278, "bracketright": 333, "asciicircum": 570, "underscore": 500, "quoteleft": 333, "a": 500, "b": 500, "c": 444, "d": 500, "e": 444, "f": 333, "g": 500, "h": 556, "i": 278, "j": 278, "k": 500, "l": 278, "m": 778, "n": 556, "o": 500, "p": 500, "q": 500, "r": 389, "s": 389, "t": 278, "u": 556, "v": 444, "w": 667, "x": 500, "y": 444, "z": 389, "braceleft": 348, "bar": 220, "braceright": 348, "asciitilde": 570, "exclamdown": 389, "cent": 500, "sterling": 500, "fraction": 167, "yen": 500, "florin": 500, "section": 500, "currency": 500, "quotesingle": 278, "quotedblleft": 500, "guillemotleft": 500, "guilsinglleft": 333, "guilsinglright": 333, "fi": 556, "fl": 556, "endash": 500, "dagger": 500, "daggerdbl": 500, "periodcentered": 250, "paragraph": 500, "bullet": 350, "quotesinglbase": 333, "quotedblbase": 500, "quotedblright": 500, "guillemotright": 500, "ellipsis": 1000, "perthousand": 1000, "questiondown": 500, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 944, "ordfeminine": 266, "Lslash": 611, "Oslash": 722, 
"OE": 944, "ordmasculine": 300, "ae": 722, "dotlessi": 278, "lslash": 278, "oslash": 500, "oe": 722, "germandbls": 500, "Idieresis": 389, "eacute": 444, "abreve": 500, "uhungarumlaut": 556, "ecaron": 444, "Ydieresis": 611, "divide": 570, "Yacute": 611, "Acircumflex": 667, "aacute": 500, "Ucircumflex": 722, "yacute": 444, "scommaaccent": 389, "ecircumflex": 444, "Uring": 722, "Udieresis": 722, "aogonek": 500, "Uacute": 722, "uogonek": 556, "Edieresis": 667, "Dcroat": 722, "commaaccent": 250, "copyright": 747, "Emacron": 667, "ccaron": 444, "aring": 500, "Ncommaaccent": 722, "lacute": 278, "agrave": 500, "Tcommaaccent": 611, "Cacute": 667, "atilde": 500, "Edotaccent": 667, "scaron": 389, "scedilla": 389, "iacute": 278, "lozenge": 494, "Rcaron": 667, "Gcommaaccent": 722, "ucircumflex": 556, "acircumflex": 500, "Amacron": 667, "rcaron": 389, "ccedilla": 444, "Zdotaccent": 611, "Thorn": 611, "Omacron": 722, "Racute": 667, "Sacute": 556, "dcaron": 608, "Umacron": 722, "uring": 556, "threesuperior": 300, "Ograve": 722, "Agrave": 667, "Abreve": 667, "multiply": 570, "uacute": 556, "Tcaron": 611, "partialdiff": 494, "ydieresis": 444, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 667, "adieresis": 500, "edieresis": 444, "cacute": 444, "nacute": 556, "umacron": 556, "Ncaron": 722, "Iacute": 389, "plusminus": 570, "brokenbar": 220, "registered": 747, "Gbreve": 722, "Idotaccent": 389, "summation": 600, "Egrave": 667, "racute": 389, "omacron": 500, "Zacute": 611, "Zcaron": 611, "greaterequal": 549, "Eth": 722, "Ccedilla": 667, "lcommaaccent": 278, "tcaron": 366, "eogonek": 444, "Uogonek": 722, "Aacute": 667, "Adieresis": 667, "egrave": 444, "zacute": 389, "iogonek": 278, "Oacute": 722, "oacute": 500, "amacron": 500, "sacute": 389, "idieresis": 278, "Ocircumflex": 722, "Ugrave": 722, "Delta": 612, "thorn": 500, "twosuperior": 300, "Odieresis": 722, "mu": 576, "igrave": 278, "ohungarumlaut": 500, "Eogonek": 667, "dcroat": 500, "threequarters": 750, "Scedilla": 556, "lcaron": 
382, "Kcommaaccent": 667, "Lacute": 611, "trademark": 1000, "edotaccent": 444, "Igrave": 389, "Imacron": 389, "Lcaron": 611, "onehalf": 750, "lessequal": 549, "ocircumflex": 500, "ntilde": 556, "Uhungarumlaut": 722, "Eacute": 667, "emacron": 444, "gbreve": 500, "onequarter": 750, "Scaron": 556, "Scommaaccent": 556, "Ohungarumlaut": 722, "degree": 400, "ograve": 500, "Ccaron": 667, "ugrave": 556, "radical": 549, "Dcaron": 722, "rcommaaccent": 389, "Ntilde": 722, "otilde": 500, "Rcommaaccent": 667, "Lcommaaccent": 611, "Atilde": 667, "Aogonek": 667, "Aring": 667, "Otilde": 722, "zdotaccent": 389, "Ecaron": 667, "Iogonek": 389, "kcommaaccent": 500, "minus": 606, "Icircumflex": 389, "ncaron": 556, "tcommaaccent": 278, "logicalnot": 606, "odieresis": 500, "udieresis": 556, "notequal": 549, "gcommaaccent": 500, "eth": 500, "zcaron": 389, "ncommaaccent": 556, "onesuperior": 300, "imacron": 278, "Euro": 500},
},
"Times-Italic": {
types.NewRectangle(-169.0, -217.0, 1010.0, 883.0),
map[string]int{"space": 250, "exclam": 333, "quotedbl": 420, "numbersign": 500, "dollar": 500, "percent": 833, "ampersand": 778, "quoteright": 333, "parenleft": 333, "parenright": 333, "asterisk": 500, "plus": 675, "comma": 250, "hyphen": 333, "period": 250, "slash": 278, "zero": 500, "one": 500, "two": 500, "three": 500, "four": 500, "five": 500, "six": 500, "seven": 500, "eight": 500, "nine": 500, "colon": 333, "semicolon": 333, "less": 675, "equal": 675, "greater": 675, "question": 500, "at": 920, "A": 611, "B": 611, "C": 667, "D": 722, "E": 611, "F": 611, "G": 722, "H": 722, "I": 333, "J": 444, "K": 667, "L": 556, "M": 833, "N": 667, "O": 722, "P": 611, "Q": 722, "R": 611, "S": 500, "T": 556, "U": 722, "V": 611, "W": 833, "X": 611, "Y": 556, "Z": 556, "bracketleft": 389, "backslash": 278, "bracketright": 389, "asciicircum": 422, "underscore": 500, "quoteleft": 333, "a": 500, "b": 500, "c": 444, "d": 500, "e": 444, "f": 278, "g": 500, "h": 500, "i": 278, "j": 278, "k": 444, "l": 278, "m": 722, "n": 500, "o": 500, "p": 500, "q": 500, "r": 389, "s": 389, "t": 278, "u": 500, "v": 444, "w": 667, "x": 444, "y": 444, "z": 389, "braceleft": 400, "bar": 275, "braceright": 400, "asciitilde": 541, "exclamdown": 389, "cent": 500, "sterling": 500, "fraction": 167, "yen": 500, "florin": 500, "section": 500, "currency": 500, "quotesingle": 214, "quotedblleft": 556, "guillemotleft": 500, "guilsinglleft": 333, "guilsinglright": 333, "fi": 500, "fl": 500, "endash": 500, "dagger": 500, "daggerdbl": 500, "periodcentered": 250, "paragraph": 523, "bullet": 350, "quotesinglbase": 333, "quotedblbase": 556, "quotedblright": 556, "guillemotright": 500, "ellipsis": 889, "perthousand": 1000, "questiondown": 500, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 889, "AE": 889, "ordfeminine": 276, "Lslash": 556, "Oslash": 722, 
"OE": 944, "ordmasculine": 310, "ae": 667, "dotlessi": 278, "lslash": 278, "oslash": 500, "oe": 667, "germandbls": 500, "Idieresis": 333, "eacute": 444, "abreve": 500, "uhungarumlaut": 500, "ecaron": 444, "Ydieresis": 556, "divide": 675, "Yacute": 556, "Acircumflex": 611, "aacute": 500, "Ucircumflex": 722, "yacute": 444, "scommaaccent": 389, "ecircumflex": 444, "Uring": 722, "Udieresis": 722, "aogonek": 500, "Uacute": 722, "uogonek": 500, "Edieresis": 611, "Dcroat": 722, "commaaccent": 250, "copyright": 760, "Emacron": 611, "ccaron": 444, "aring": 500, "Ncommaaccent": 667, "lacute": 278, "agrave": 500, "Tcommaaccent": 556, "Cacute": 667, "atilde": 500, "Edotaccent": 611, "scaron": 389, "scedilla": 389, "iacute": 278, "lozenge": 471, "Rcaron": 611, "Gcommaaccent": 722, "ucircumflex": 500, "acircumflex": 500, "Amacron": 611, "rcaron": 389, "ccedilla": 444, "Zdotaccent": 556, "Thorn": 611, "Omacron": 722, "Racute": 611, "Sacute": 500, "dcaron": 544, "Umacron": 722, "uring": 500, "threesuperior": 300, "Ograve": 722, "Agrave": 611, "Abreve": 611, "multiply": 675, "uacute": 500, "Tcaron": 556, "partialdiff": 476, "ydieresis": 444, "Nacute": 667, "icircumflex": 278, "Ecircumflex": 611, "adieresis": 500, "edieresis": 444, "cacute": 444, "nacute": 500, "umacron": 500, "Ncaron": 667, "Iacute": 333, "plusminus": 675, "brokenbar": 275, "registered": 760, "Gbreve": 722, "Idotaccent": 333, "summation": 600, "Egrave": 611, "racute": 389, "omacron": 500, "Zacute": 556, "Zcaron": 556, "greaterequal": 549, "Eth": 722, "Ccedilla": 667, "lcommaaccent": 278, "tcaron": 300, "eogonek": 444, "Uogonek": 722, "Aacute": 611, "Adieresis": 611, "egrave": 444, "zacute": 389, "iogonek": 278, "Oacute": 722, "oacute": 500, "amacron": 500, "sacute": 389, "idieresis": 278, "Ocircumflex": 722, "Ugrave": 722, "Delta": 612, "thorn": 500, "twosuperior": 300, "Odieresis": 722, "mu": 500, "igrave": 278, "ohungarumlaut": 500, "Eogonek": 611, "dcroat": 500, "threequarters": 750, "Scedilla": 500, "lcaron": 
300, "Kcommaaccent": 667, "Lacute": 556, "trademark": 980, "edotaccent": 444, "Igrave": 333, "Imacron": 333, "Lcaron": 611, "onehalf": 750, "lessequal": 549, "ocircumflex": 500, "ntilde": 500, "Uhungarumlaut": 722, "Eacute": 611, "emacron": 444, "gbreve": 500, "onequarter": 750, "Scaron": 500, "Scommaaccent": 500, "Ohungarumlaut": 722, "degree": 400, "ograve": 500, "Ccaron": 667, "ugrave": 500, "radical": 453, "Dcaron": 722, "rcommaaccent": 389, "Ntilde": 667, "otilde": 500, "Rcommaaccent": 611, "Lcommaaccent": 556, "Atilde": 611, "Aogonek": 611, "Aring": 611, "Otilde": 722, "zdotaccent": 389, "Ecaron": 611, "Iogonek": 333, "kcommaaccent": 444, "minus": 675, "Icircumflex": 333, "ncaron": 500, "tcommaaccent": 278, "logicalnot": 675, "odieresis": 500, "udieresis": 500, "notequal": 549, "gcommaaccent": 500, "eth": 500, "zcaron": 389, "ncommaaccent": 500, "onesuperior": 300, "imacron": 278, "Euro": 500},
},
"Times-Roman": {
types.NewRectangle(-168.0, -218.0, 1000.0, 898.0),
map[string]int{"space": 250, "exclam": 333, "quotedbl": 408, "numbersign": 500, "dollar": 500, "percent": 833, "ampersand": 778, "quoteright": 333, "parenleft": 333, "parenright": 333, "asterisk": 500, "plus": 564, "comma": 250, "hyphen": 333, "period": 250, "slash": 278, "zero": 500, "one": 500, "two": 500, "three": 500, "four": 500, "five": 500, "six": 500, "seven": 500, "eight": 500, "nine": 500, "colon": 278, "semicolon": 278, "less": 564, "equal": 564, "greater": 564, "question": 444, "at": 921, "A": 722, "B": 667, "C": 667, "D": 722, "E": 611, "F": 556, "G": 722, "H": 722, "I": 333, "J": 389, "K": 722, "L": 611, "M": 889, "N": 722, "O": 722, "P": 556, "Q": 722, "R": 667, "S": 556, "T": 611, "U": 722, "V": 722, "W": 944, "X": 722, "Y": 722, "Z": 611, "bracketleft": 333, "backslash": 278, "bracketright": 333, "asciicircum": 469, "underscore": 500, "quoteleft": 333, "a": 444, "b": 500, "c": 444, "d": 500, "e": 444, "f": 333, "g": 500, "h": 500, "i": 278, "j": 278, "k": 500, "l": 278, "m": 778, "n": 500, "o": 500, "p": 500, "q": 500, "r": 333, "s": 389, "t": 278, "u": 500, "v": 500, "w": 722, "x": 500, "y": 500, "z": 444, "braceleft": 480, "bar": 200, "braceright": 480, "asciitilde": 541, "exclamdown": 333, "cent": 500, "sterling": 500, "fraction": 167, "yen": 500, "florin": 500, "section": 500, "currency": 500, "quotesingle": 180, "quotedblleft": 444, "guillemotleft": 500, "guilsinglleft": 333, "guilsinglright": 333, "fi": 556, "fl": 556, "endash": 500, "dagger": 500, "daggerdbl": 500, "periodcentered": 250, "paragraph": 453, "bullet": 350, "quotesinglbase": 333, "quotedblbase": 444, "quotedblright": 444, "guillemotright": 500, "ellipsis": 1000, "perthousand": 1000, "questiondown": 444, "grave": 333, "acute": 333, "circumflex": 333, "tilde": 333, "macron": 333, "breve": 333, "dotaccent": 333, "dieresis": 333, "ring": 333, "cedilla": 333, "hungarumlaut": 333, "ogonek": 333, "caron": 333, "emdash": 1000, "AE": 889, "ordfeminine": 276, "Lslash": 611, "Oslash": 722, 
"OE": 889, "ordmasculine": 310, "ae": 667, "dotlessi": 278, "lslash": 278, "oslash": 500, "oe": 722, "germandbls": 500, "Idieresis": 333, "eacute": 444, "abreve": 444, "uhungarumlaut": 500, "ecaron": 444, "Ydieresis": 722, "divide": 564, "Yacute": 722, "Acircumflex": 722, "aacute": 444, "Ucircumflex": 722, "yacute": 500, "scommaaccent": 389, "ecircumflex": 444, "Uring": 722, "Udieresis": 722, "aogonek": 444, "Uacute": 722, "uogonek": 500, "Edieresis": 611, "Dcroat": 722, "commaaccent": 250, "copyright": 760, "Emacron": 611, "ccaron": 444, "aring": 444, "Ncommaaccent": 722, "lacute": 278, "agrave": 444, "Tcommaaccent": 611, "Cacute": 667, "atilde": 444, "Edotaccent": 611, "scaron": 389, "scedilla": 389, "iacute": 278, "lozenge": 471, "Rcaron": 667, "Gcommaaccent": 722, "ucircumflex": 500, "acircumflex": 444, "Amacron": 722, "rcaron": 333, "ccedilla": 444, "Zdotaccent": 611, "Thorn": 556, "Omacron": 722, "Racute": 667, "Sacute": 556, "dcaron": 588, "Umacron": 722, "uring": 500, "threesuperior": 300, "Ograve": 722, "Agrave": 722, "Abreve": 722, "multiply": 564, "uacute": 500, "Tcaron": 611, "partialdiff": 476, "ydieresis": 500, "Nacute": 722, "icircumflex": 278, "Ecircumflex": 611, "adieresis": 444, "edieresis": 444, "cacute": 444, "nacute": 500, "umacron": 500, "Ncaron": 722, "Iacute": 333, "plusminus": 564, "brokenbar": 200, "registered": 760, "Gbreve": 722, "Idotaccent": 333, "summation": 600, "Egrave": 611, "racute": 333, "omacron": 500, "Zacute": 611, "Zcaron": 611, "greaterequal": 549, "Eth": 722, "Ccedilla": 667, "lcommaaccent": 278, "tcaron": 326, "eogonek": 444, "Uogonek": 722, "Aacute": 722, "Adieresis": 722, "egrave": 444, "zacute": 444, "iogonek": 278, "Oacute": 722, "oacute": 500, "amacron": 444, "sacute": 389, "idieresis": 278, "Ocircumflex": 722, "Ugrave": 722, "Delta": 612, "thorn": 500, "twosuperior": 300, "Odieresis": 722, "mu": 500, "igrave": 278, "ohungarumlaut": 500, "Eogonek": 611, "dcroat": 500, "threequarters": 750, "Scedilla": 556, "lcaron": 
344, "Kcommaaccent": 722, "Lacute": 611, "trademark": 980, "edotaccent": 444, "Igrave": 333, "Imacron": 333, "Lcaron": 611, "onehalf": 750, "lessequal": 549, "ocircumflex": 500, "ntilde": 500, "Uhungarumlaut": 722, "Eacute": 611, "emacron": 444, "gbreve": 500, "onequarter": 750, "Scaron": 556, "Scommaaccent": 556, "Ohungarumlaut": 722, "degree": 400, "ograve": 500, "Ccaron": 667, "ugrave": 500, "radical": 453, "Dcaron": 722, "rcommaaccent": 333, "Ntilde": 722, "otilde": 500, "Rcommaaccent": 667, "Lcommaaccent": 611, "Atilde": 722, "Aogonek": 722, "Aring": 722, "Otilde": 722, "zdotaccent": 444, "Ecaron": 611, "Iogonek": 333, "kcommaaccent": 500, "minus": 564, "Icircumflex": 333, "ncaron": 500, "tcommaaccent": 278, "logicalnot": 564, "odieresis": 500, "udieresis": 500, "notequal": 549, "gcommaaccent": 500, "eth": 500, "zcaron": 444, "ncommaaccent": 500, "onesuperior": 300, "imacron": 278, "Euro": 500},
},
"ZapfDingbats": {
types.NewRectangle(-1.0, -143.0, 981.0, 820.0),
map[string]int{"space": 278, "a1": 974, "a2": 961, "a202": 974, "a3": 980, "a4": 719, "a5": 789, "a119": 790, "a118": 791, "a117": 690, "a11": 960, "a12": 939, "a13": 549, "a14": 855, "a15": 911, "a16": 933, "a105": 911, "a17": 945, "a18": 974, "a19": 755, "a20": 846, "a21": 762, "a22": 761, "a23": 571, "a24": 677, "a25": 763, "a26": 760, "a27": 759, "a28": 754, "a6": 494, "a7": 552, "a8": 537, "a9": 577, "a10": 692, "a29": 786, "a30": 788, "a31": 788, "a32": 790, "a33": 793, "a34": 794, "a35": 816, "a36": 823, "a37": 789, "a38": 841, "a39": 823, "a40": 833, "a41": 816, "a42": 831, "a43": 923, "a44": 744, "a45": 723, "a46": 749, "a47": 790, "a48": 792, "a49": 695, "a50": 776, "a51": 768, "a52": 792, "a53": 759, "a54": 707, "a55": 708, "a56": 682, "a57": 701, "a58": 826, "a59": 815, "a60": 789, "a61": 789, "a62": 707, "a63": 687, "a64": 696, "a65": 689, "a66": 786, "a67": 787, "a68": 713, "a69": 791, "a70": 785, "a71": 791, "a72": 873, "a73": 761, "a74": 762, "a203": 762, "a75": 759, "a204": 759, "a76": 892, "a77": 892, "a78": 788, "a79": 784, "a81": 438, "a82": 138, "a83": 277, "a84": 415, "a97": 392, "a98": 392, "a99": 668, "a100": 668, "a89": 390, "a90": 390, "a93": 317, "a94": 317, "a91": 276, "a92": 276, "a205": 509, "a85": 509, "a206": 410, "a86": 410, "a87": 234, "a88": 234, "a95": 334, "a96": 334, "a101": 732, "a102": 544, "a103": 544, "a104": 910, "a106": 667, "a107": 760, "a108": 760, "a112": 776, "a111": 595, "a110": 694, "a109": 626, "a120": 788, "a121": 788, "a122": 788, "a123": 788, "a124": 788, "a125": 788, "a126": 788, "a127": 788, "a128": 788, "a129": 788, "a130": 788, "a131": 788, "a132": 788, "a133": 788, "a134": 788, "a135": 788, "a136": 788, "a137": 788, "a138": 788, "a139": 788, "a140": 788, "a141": 788, "a142": 788, "a143": 788, "a144": 788, "a145": 788, "a146": 788, "a147": 788, "a148": 788, "a149": 788, "a150": 788, "a151": 788, "a152": 788, "a153": 788, "a154": 788, "a155": 788, "a156": 788, "a157": 788, "a158": 788, "a159": 788, "a160": 
894, "a161": 838, "a163": 1016, "a164": 458, "a196": 748, "a165": 924, "a192": 748, "a166": 918, "a167": 927, "a168": 928, "a169": 928, "a170": 834, "a171": 873, "a172": 828, "a173": 924, "a162": 924, "a174": 917, "a175": 930, "a176": 931, "a177": 463, "a178": 883, "a179": 836, "a193": 836, "a180": 867, "a199": 867, "a181": 696, "a200": 696, "a182": 874, "a201": 874, "a183": 760, "a184": 946, "a197": 771, "a185": 865, "a194": 771, "a198": 888, "a186": 967, "a195": 888, "a187": 831, "a188": 873, "a189": 927, "a190": 970, "a191": 918},
},
}

169
vendor/github.com/pdfcpu/pdfcpu/pkg/api/api.go generated vendored Normal file
View File

@ -0,0 +1,169 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package api lets you integrate pdfcpu's operations into your Go backend.
//
// There are two api layers supporting all pdfcpu operations:
// 1) The file based layer (used by pdfcpu's cli)
// 2) The io.ReadSeeker/io.Writer based layer for backend integration.
//
// For any pdfcpu command there are two functions.
//
// The file based function always calls the io.ReadSeeker/io.Writer based function:
// func CommandFile(inFile, outFile string, conf *pdf.Configuration) error
// func Command(rs io.ReadSeeker, w io.Writer, conf *pdf.Configuration) error
//
// eg. for optimization:
// func OptimizeFile(inFile, outFile string, conf *pdf.Configuration) error
// func Optimize(rs io.ReadSeeker, w io.Writer, conf *pdf.Configuration) error
package api
import (
"bufio"
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu/validate"
)
// ReadContext parses rs into a pdfcpu.Context, the internal structure
// holding the document's cross reference table.
func ReadContext(rs io.ReadSeeker, conf *pdfcpu.Configuration) (*pdfcpu.Context, error) {
	ctx, err := pdfcpu.Read(rs, conf)
	return ctx, err
}
// ReadContextFile opens inFile with the default configuration, builds its
// Context and validates the cross reference table before returning it.
func ReadContextFile(inFile string) (*pdfcpu.Context, error) {
	f, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	ctx, err := ReadContext(f, pdfcpu.NewDefaultConfiguration())
	if err != nil {
		return nil, err
	}
	if err := validate.XRefTable(ctx.XRefTable); err != nil {
		return nil, err
	}
	return ctx, nil
}
// ValidateContext checks ctx's cross reference table for consistency.
func ValidateContext(ctx *pdfcpu.Context) error {
	err := validate.XRefTable(ctx.XRefTable)
	return err
}
// OptimizeContext optimizes ctx's cross reference table in place.
func OptimizeContext(ctx *pdfcpu.Context) error {
	err := pdfcpu.OptimizeXRefTable(ctx)
	return err
}
// WriteContext serializes a PDF context to w via a buffered writer.
func WriteContext(ctx *pdfcpu.Context, w io.Writer) error {
	// Keep a handle on the underlying *os.File, if w happens to be one,
	// so the write layer has access to file-specific operations.
	if f, isFile := w.(*os.File); isFile {
		ctx.Write.Fp = f
	}
	ctx.Write.Writer = bufio.NewWriter(w)
	return pdfcpu.Write(ctx)
}
// WriteContextFile creates outFile and serializes the PDF context into it.
func WriteContextFile(ctx *pdfcpu.Context, outFile string) error {
	f, err := os.Create(outFile)
	if err != nil {
		return err
	}
	defer f.Close()
	return WriteContext(ctx, f)
}
// readAndValidate reads rs into a Context and, unless validation is switched
// off, validates its cross reference table. It returns the read and
// validation durations in seconds (both zero when validation is bypassed).
func readAndValidate(rs io.ReadSeeker, conf *pdfcpu.Configuration, from1 time.Time) (ctx *pdfcpu.Context, dur1, dur2 float64, err error) {
	ctx, err = ReadContext(rs, conf)
	if err != nil {
		return nil, 0, 0, err
	}
	dur1 = time.Since(from1).Seconds()

	if conf.ValidationMode == pdfcpu.ValidationNone {
		// Validation bypassed; the read duration is dropped in this case.
		return ctx, 0, 0, nil
	}

	from2 := time.Now()
	err = validate.XRefTable(ctx.XRefTable)
	if err != nil {
		return nil, 0, 0, err
	}
	dur2 = time.Since(from2).Seconds()

	return ctx, dur1, dur2, nil
}
// readValidateAndOptimize runs the read/validate pipeline on rs and then
// optimizes the resulting context, reporting per-phase durations in seconds.
func readValidateAndOptimize(rs io.ReadSeeker, conf *pdfcpu.Configuration, from1 time.Time) (ctx *pdfcpu.Context, dur1, dur2, dur3 float64, err error) {
	ctx, dur1, dur2, err = readAndValidate(rs, conf, from1)
	if err != nil {
		return nil, 0, 0, 0, err
	}

	from3 := time.Now()
	err = OptimizeContext(ctx)
	if err != nil {
		return nil, 0, 0, 0, err
	}
	dur3 = time.Since(from3).Seconds()

	return ctx, dur1, dur2, dur3, nil
}
// logOperationStats dumps the context's xref table plus timing stats for op
// to the stats log; read/write stats are added only for non-empty input.
func logOperationStats(ctx *pdfcpu.Context, op string, durRead, durVal, durOpt, durWrite, durTotal float64) {
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats(op, durRead, durVal, durOpt, durWrite, durTotal)
	if ctx.Read.FileSize <= 0 {
		return
	}
	ctx.Read.LogStats(ctx.Optimized)
	ctx.Write.LogStats()
}
// EnsureDefaultConfigAt switches to the pdfcpu config dir located at path.
// If path/pdfcpu does not exist it is created, including config.yml.
// Call this if you have specific requirements regarding the location of
// the pdfcpu config dir.
func EnsureDefaultConfigAt(path string) error {
	return pdfcpu.EnsureDefaultConfigAt(path)
}
// DisableConfigDir disables the configuration directory.
// Any needed default configuration will be loaded from configuration.go.
// Since the config dir also contains the user font dir, this limits font
// usage to the default core font set — no user fonts will be available.
// Call this if you don't want a specific configuration and don't need
// user fonts.
func DisableConfigDir() {
	pdfcpu.ConfigPath = "disable"
}
// LoadConfiguration locates and loads the default configuration and also
// loads installed user fonts. Call this if you don't have a specific config
// dir location and need user fonts for stamping or watermarking.
func LoadConfiguration() {
	// Invoked purely for its side effects; the returned configuration
	// is intentionally discarded.
	pdfcpu.NewDefaultConfiguration()
}

331
vendor/github.com/pdfcpu/pdfcpu/pkg/api/attach.go generated vendored Normal file
View File

@ -0,0 +1,331 @@
/*
Copyright 2019 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// ListAttachments returns a sorted list of embedded file attachments of rs.
// Each entry is the attachment's file name, followed by its description in
// parentheses when one is present.
func ListAttachments(rs io.ReadSeeker, conf *pdfcpu.Configuration) ([]string, error) {
	if rs == nil {
		return nil, errors.New("pdfcpu: ListAttachments: Please provide rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return nil, err
	}

	listStart := time.Now()
	attachments, err := ctx.ListAttachments()
	if err != nil {
		return nil, err
	}

	var entries []string
	for _, a := range attachments {
		entry := a.FileName
		if a.Desc != "" {
			entry = fmt.Sprintf("%s (%s)", entry, a.Desc)
		}
		entries = append(entries, entry)
	}
	sort.Strings(entries)

	durList := time.Since(listStart).Seconds()
	durTotal := time.Since(start).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("list files", durRead, durVal, durOpt, durList, durTotal)

	return entries, nil
}
// ListAttachmentsFile returns a list of embedded file attachments of inFile.
func ListAttachmentsFile(inFile string, conf *pdfcpu.Configuration) ([]string, error) {
	in, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer in.Close()
	return ListAttachments(in, conf)
}
// AddAttachments embeds files into a PDF context read from rs and writes the result to w.
// Each element of files is either a file name or a file name and a description
// separated by a comma. coll is forwarded to ctx.AddAttachment.
func AddAttachments(rs io.ReadSeeker, w io.Writer, files []string, coll bool, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: AddAttachments: Please provide rs")
	}
	if w == nil {
		return errors.New("pdfcpu: AddAttachments: Please provide w")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	from := time.Now()
	// ok tracks whether at least one attachment was actually added.
	var ok bool
	for _, fn := range files {
		// An optional description follows the file name after a comma.
		s := strings.Split(fn, ",")
		if len(s) == 0 || len(s) > 2 {
			continue
		}
		fileName := s[0]
		desc := ""
		if len(s) == 2 {
			desc = s[1]
		}
		log.CLI.Printf("adding %s\n", fileName)
		f, err := os.Open(fileName)
		if err != nil {
			return err
		}
		// NOTE(review): deliberately deferred rather than closed per
		// iteration — the attachment below keeps f as its Reader, which is
		// presumably consumed only when the context is written. All opened
		// files therefore stay open until this function returns; confirm
		// against ctx.AddAttachment / WriteContext before changing this.
		defer f.Close()
		fi, err := f.Stat()
		if err != nil {
			return err
		}
		mt := fi.ModTime()
		a := pdfcpu.Attachment{Reader: f, ID: filepath.Base(fileName), Desc: desc, ModTime: &mt}
		if err = ctx.AddAttachment(a, coll); err != nil {
			return err
		}
		ok = true
	}
	if !ok {
		return errors.New("no attachment added")
	}
	durAdd := time.Since(from).Seconds()
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := durAdd + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "add attachment, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// AddAttachmentsFile embeds files into a PDF context read from inFile and
// writes the result to outFile. If outFile is empty or equal to inFile, the
// edit happens in place via a temporary file.
func AddAttachmentsFile(inFile, outFile string, files []string, coll bool, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	// In-place edits write to inFile+".tmp"; otherwise write outFile directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// The named return err lets this deferred closure both observe a failure
	// from AddAttachments and surface its own Close/Rename errors.
	defer func() {
		if err != nil {
			// Failure path: close both files and, for in-place edits,
			// discard the temporary file.
			f2.Close()
			f1.Close()
			if outFile == "" || inFile == outFile {
				os.Remove(tmpFile)
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place edit: atomically replace the original with the temp file.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return AddAttachments(f1, f2, files, coll, conf)
}
// RemoveAttachments deletes embedded files from a PDF context read from rs
// and writes the result to w. files names the attachments to remove.
func RemoveAttachments(rs io.ReadSeeker, w io.Writer, files []string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: RemoveAttachments: Please provide rs")
	}
	if w == nil {
		return errors.New("pdfcpu: RemoveAttachments: Please provide w")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}

	removeStart := time.Now()
	removed, err := ctx.RemoveAttachments(files)
	if err != nil {
		return err
	}
	if !removed {
		return errors.New("no attachment removed")
	}
	durRemove := time.Since(removeStart).Seconds()

	writeStart := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}

	durWrite := durRemove + time.Since(writeStart).Seconds()
	durTotal := time.Since(start).Seconds()
	logOperationStats(ctx, "remove att, write", durRead, durVal, durOpt, durWrite, durTotal)

	return nil
}
// RemoveAttachmentsFile deletes embedded files from a PDF context read from
// inFile and writes the result to outFile. If outFile is empty or equal to
// inFile, the edit happens in place via a temporary file.
func RemoveAttachmentsFile(inFile, outFile string, files []string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	// In-place edits write to inFile+".tmp"; otherwise write outFile directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// Deferred cleanup mirrors AddAttachmentsFile: on failure close and (for
	// in-place edits) remove the temp file; on success finalize the rename.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			if outFile == "" || inFile == outFile {
				os.Remove(tmpFile)
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return RemoveAttachments(f1, f2, files, conf)
}
// ExtractAttachments extracts embedded files from a PDF context read from rs
// into outDir. fileNames selects the attachments to extract.
func ExtractAttachments(rs io.ReadSeeker, outDir string, fileNames []string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: ExtractAttachments: Please provide rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	fromWrite := time.Now()
	aa, err := ctx.ExtractAttachments(fileNames)
	if err != nil {
		return err
	}
	for _, a := range aa {
		fileName := filepath.Join(outDir, a.FileName)
		log.CLI.Printf("writing %s\n", fileName)
		f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
		if err != nil {
			return err
		}
		if _, err = io.Copy(f, a); err != nil {
			// Fix: close the output file before bailing out; previously the
			// descriptor leaked when the copy failed.
			f.Close()
			return err
		}
		if err := f.Close(); err != nil {
			return err
		}
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("write files", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// ExtractAttachmentsFile extracts embedded files from a PDF context read
// from inFile into outDir.
func ExtractAttachmentsFile(inFile, outDir string, files []string, conf *pdfcpu.Configuration) error {
	in, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer in.Close()
	return ExtractAttachments(in, outDir, files, conf)
}

325
vendor/github.com/pdfcpu/pdfcpu/pkg/api/boxes.go generated vendored Normal file
View File

@ -0,0 +1,325 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// PageBoundariesFromBoxList parses a comma-separated list of box types.
func PageBoundariesFromBoxList(s string) (*pdfcpu.PageBoundaries, error) {
	pb, err := pdfcpu.ParseBoxList(s)
	return pb, err
}
// PageBoundaries parses a list of box definitions and assignments,
// interpreting dimensions in the given display unit.
func PageBoundaries(s string, unit pdfcpu.DisplayUnit) (*pdfcpu.PageBoundaries, error) {
	pb, err := pdfcpu.ParsePageBoundaries(s, unit)
	return pb, err
}
// Box parses a single box definition, interpreting dimensions in the
// given display unit.
func Box(s string, u pdfcpu.DisplayUnit) (*pdfcpu.Box, error) {
	b, err := pdfcpu.ParseBox(s, u)
	return b, err
}
// ListBoxes returns a list of page boundaries for selected pages of rs.
func ListBoxes(rs io.ReadSeeker, selectedPages []string, pb *pdfcpu.PageBoundaries, conf *pdfcpu.Configuration) ([]string, error) {
	if rs == nil {
		return nil, errors.New("pdfcpu: ListBoxes: missing rs")
	}
	if conf == nil {
		// NOTE: Cmd is only set when the configuration is defaulted here; a
		// caller-supplied conf keeps whatever Cmd it already carries.
		conf = pdfcpu.NewDefaultConfiguration()
		conf.Cmd = pdfcpu.LISTBOXES
	}

	ctx, _, _, _, err := readValidateAndOptimize(rs, conf, time.Now())
	if err != nil {
		return nil, err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return nil, err
	}

	selection, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return nil, err
	}

	return ctx.ListPageBoundaries(selection, pb)
}
// ListBoxesFile returns a list of page boundaries for selected pages of
// inFile. A nil pb selects all box types.
func ListBoxesFile(inFile string, selectedPages []string, pb *pdfcpu.PageBoundaries, conf *pdfcpu.Configuration) ([]string, error) {
	in, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer in.Close()

	if pb == nil {
		pb = &pdfcpu.PageBoundaries{}
		pb.SelectAll()
	}
	log.CLI.Printf("listing %s for %s\n", pb, inFile)

	return ListBoxes(in, selectedPages, pb, conf)
}
// AddBoxes adds page boundaries for selected pages of rs and writes the
// result to w. Unless validation is switched off, the modified context is
// re-validated before writing.
func AddBoxes(rs io.ReadSeeker, w io.Writer, selectedPages []string, pb *pdfcpu.PageBoundaries, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: AddBoxes: missing rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.ADDBOXES

	ctx, _, _, _, err := readValidateAndOptimize(rs, conf, time.Now())
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	selection, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err := ctx.AddPageBoundaries(selection, pb); err != nil {
		return err
	}

	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err := ValidateContext(ctx); err != nil {
			return err
		}
	}

	return WriteContext(ctx, w)
}
// AddBoxesFile adds page boundaries for selected pages of inFile and writes
// the result to outFile. If outFile is empty or equal to inFile, the edit
// happens in place via a temporary file.
func AddBoxesFile(inFile, outFile string, selectedPages []string, pb *pdfcpu.PageBoundaries, conf *pdfcpu.Configuration) error {
	log.CLI.Printf("adding %s for %s\n", pb, inFile)
	var (
		f1, f2 *os.File
		err    error
	)
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	// In-place edits write to inFile+".tmp"; otherwise write outFile directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	defer func() {
		if err != nil {
			// NOTE(review): unlike AddAttachmentsFile, the failure path here
			// removes tmpFile unconditionally — i.e. a distinct outFile is
			// also deleted on error.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place edit: replace the original with the finished temp file.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return AddBoxes(f1, f2, selectedPages, pb, conf)
}
// RemoveBoxes removes page boundaries as specified in pb for selected pages
// of rs and writes the result to w. Unless validation is switched off, the
// modified context is re-validated before writing.
func RemoveBoxes(rs io.ReadSeeker, w io.Writer, selectedPages []string, pb *pdfcpu.PageBoundaries, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: RemoveBoxes: missing rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.REMOVEBOXES

	ctx, _, _, _, err := readValidateAndOptimize(rs, conf, time.Now())
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	selection, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err := ctx.RemovePageBoundaries(selection, pb); err != nil {
		return err
	}

	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err := ValidateContext(ctx); err != nil {
			return err
		}
	}

	return WriteContext(ctx, w)
}
// RemoveBoxesFile removes page boundaries as specified in pb for selected
// pages of inFile and writes the result to outFile. If outFile is empty or
// equal to inFile, the edit happens in place via a temporary file.
func RemoveBoxesFile(inFile, outFile string, selectedPages []string, pb *pdfcpu.PageBoundaries, conf *pdfcpu.Configuration) error {
	log.CLI.Printf("removing %s for %s\n", pb, inFile)
	var (
		f1, f2 *os.File
		err    error
	)
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	// In-place edits write to inFile+".tmp"; otherwise write outFile directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	defer func() {
		if err != nil {
			// Failure path removes tmpFile unconditionally (even when it is
			// a distinct outFile), matching AddBoxesFile.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place edit: replace the original with the finished temp file.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return RemoveBoxes(f1, f2, selectedPages, pb, conf)
}
// Crop adds crop boxes for selected pages of rs and writes the result to w.
// Unless validation is switched off, the modified context is re-validated
// before writing.
func Crop(rs io.ReadSeeker, w io.Writer, selectedPages []string, b *pdfcpu.Box, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: Crop: missing rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.CROP

	ctx, _, _, _, err := readValidateAndOptimize(rs, conf, time.Now())
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	selection, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err := ctx.Crop(selection, b); err != nil {
		return err
	}

	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err := ValidateContext(ctx); err != nil {
			return err
		}
	}

	return WriteContext(ctx, w)
}
// CropFile adds crop boxes for selected pages of inFile and writes the
// result to outFile. If outFile is empty or equal to inFile, the edit
// happens in place via a temporary file.
func CropFile(inFile, outFile string, selectedPages []string, b *pdfcpu.Box, conf *pdfcpu.Configuration) error {
	log.CLI.Printf("cropping %s\n", inFile)
	var (
		f1, f2 *os.File
		err    error
	)
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	// In-place edits write to inFile+".tmp"; otherwise write outFile directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	defer func() {
		if err != nil {
			// Failure path removes tmpFile unconditionally (even when it is
			// a distinct outFile), matching the other boxes.go helpers.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place edit: replace the original with the finished temp file.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return Crop(f1, f2, selectedPages, b, conf)
}

104
vendor/github.com/pdfcpu/pdfcpu/pkg/api/collect.go generated vendored Normal file
View File

@ -0,0 +1,104 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// Collect creates a custom PDF page sequence for selected pages of rs and
// writes the result to w. Pages are extracted into a new context which,
// unless validation is switched off, is validated before writing.
func Collect(rs io.ReadSeeker, w io.Writer, selectedPages []string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.COLLECT

	ctx, _, _, _, err := readValidateAndOptimize(rs, conf, time.Now())
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	sequence, err := PagesForPageCollection(ctx.PageCount, selectedPages)
	if err != nil {
		return err
	}
	collected, err := ctx.ExtractPages(sequence, true)
	if err != nil {
		return err
	}

	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err := ValidateContext(collected); err != nil {
			return err
		}
	}

	return WriteContext(collected, w)
}
// CollectFile creates a custom PDF page sequence for inFile and writes the
// result to outFile. If outFile is empty or equal to inFile, the edit
// happens in place via a temporary file.
func CollectFile(inFile, outFile string, selectedPages []string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	// In-place edits write to inFile+".tmp"; otherwise write outFile directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	defer func() {
		if err != nil {
			// Failure path: close both files and drop the temp file
			// (unconditionally, even when tmpFile is a distinct outFile).
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place edit: replace the original with the finished temp file.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return Collect(f1, f2, selectedPages, conf)
}

34
vendor/github.com/pdfcpu/pdfcpu/pkg/api/create.go generated vendored Normal file
View File

@ -0,0 +1,34 @@
/*
Copyright 2019 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"os"
pdf "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// CreatePDFFile creates a PDF file for xRefTable and writes it to outFile.
func CreatePDFFile(xRefTable *pdf.XRefTable, outFile string, conf *pdf.Configuration) error {
	out, err := os.Create(outFile)
	if err != nil {
		return err
	}
	defer out.Close()

	return WriteContext(pdf.CreateContext(xRefTable, conf), out)
}

66
vendor/github.com/pdfcpu/pdfcpu/pkg/api/crypto.go generated vendored Normal file
View File

@ -0,0 +1,66 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// EncryptFile encrypts inFile and writes the result to outFile.
// A configuration containing the current passwords is required; encryption
// is applied as part of the optimize/write pipeline.
func EncryptFile(inFile, outFile string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		return errors.New("pdfcpu: missing configuration for encryption")
	}
	conf.Cmd = pdfcpu.ENCRYPT
	return OptimizeFile(inFile, outFile, conf)
}
// DecryptFile decrypts inFile and writes the result to outFile.
// A configuration containing the current passwords is required; decryption
// is applied as part of the optimize/write pipeline.
func DecryptFile(inFile, outFile string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		return errors.New("pdfcpu: missing configuration for decryption")
	}
	conf.Cmd = pdfcpu.DECRYPT
	return OptimizeFile(inFile, outFile, conf)
}
// ChangeUserPasswordFile reads inFile, changes the user password from pwOld
// to pwNew and writes the result to outFile.
// A configuration containing the current passwords is required.
func ChangeUserPasswordFile(inFile, outFile string, pwOld, pwNew string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		return errors.New("pdfcpu: missing configuration for change user password")
	}
	conf.Cmd = pdfcpu.CHANGEUPW
	conf.UserPW = pwOld
	conf.UserPWNew = &pwNew
	return OptimizeFile(inFile, outFile, conf)
}
// ChangeOwnerPasswordFile reads inFile, changes the owner password and writes the result to outFile.
// (Previous comment said "user password" — the code sets CHANGEOPW/OwnerPW.)
// A configuration containing the current passwords is required.
func ChangeOwnerPasswordFile(inFile, outFile string, pwOld, pwNew string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		return errors.New("pdfcpu: missing configuration for change owner password")
	}
	conf.Cmd = pdfcpu.CHANGEOPW
	conf.OwnerPW = pwOld
	conf.OwnerPWNew = &pwNew
	return OptimizeFile(inFile, outFile, conf)
}

358
vendor/github.com/pdfcpu/pdfcpu/pkg/api/extract.go generated vendored Normal file
View File

@ -0,0 +1,358 @@
/*
Copyright 2019 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// ExtractImages dumps embedded image resources from rs into outDir for
// selected pages. Output files are named <fileName>_<page>_<name>.<type>.
func ExtractImages(rs io.ReadSeeker, outDir, fileName string, selectedPages []string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: ExtractImages: Please provide rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}
	fromWrite := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	// Base name for output files, stripped of any .pdf suffix.
	fileName = strings.TrimSuffix(filepath.Base(fileName), ".pdf")
	for i, v := range pages {
		if !v {
			continue
		}
		ii, err := ctx.ExtractPageImages(i)
		if err != nil {
			return err
		}
		for _, img := range ii {
			outFile := filepath.Join(outDir, fmt.Sprintf("%s_%d_%s.%s", fileName, i, img.Name, img.Type))
			log.CLI.Printf("writing %s\n", outFile)
			w, err := os.Create(outFile)
			if err != nil {
				return err
			}
			if _, err = io.Copy(w, img); err != nil {
				// Fix: close the output file before bailing out; previously
				// the descriptor leaked when the copy failed.
				w.Close()
				return err
			}
			if err := w.Close(); err != nil {
				return err
			}
		}
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("write images", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// ExtractImagesFile dumps embedded image resources from inFile into outDir
// for selected pages.
func ExtractImagesFile(inFile, outDir string, selectedPages []string, conf *pdfcpu.Configuration) error {
	in, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer in.Close()
	log.CLI.Printf("extracting images from %s into %s/ ...\n", inFile, outDir)
	return ExtractImages(in, outDir, filepath.Base(inFile), selectedPages, conf)
}
// ExtractFonts dumps embedded fontfiles from rs into outDir for selected
// pages. Output files are named <fileName>_<fontname>.<type>.
func ExtractFonts(rs io.ReadSeeker, outDir, fileName string, selectedPages []string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: ExtractFonts: Please provide rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}
	fromWrite := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	// Base name for output files, stripped of any .pdf suffix.
	fileName = strings.TrimSuffix(filepath.Base(fileName), ".pdf")
	for i, v := range pages {
		if !v {
			continue
		}
		ff, err := ctx.ExtractPageFonts(i)
		if err != nil {
			return err
		}
		for _, f := range ff {
			outFile := filepath.Join(outDir, fmt.Sprintf("%s_%s.%s", fileName, f.Name, f.Type))
			log.CLI.Printf("writing %s\n", outFile)
			w, err := os.Create(outFile)
			if err != nil {
				return err
			}
			if _, err = io.Copy(w, f); err != nil {
				// Fix: close the output file before bailing out; previously
				// the descriptor leaked when the copy failed.
				w.Close()
				return err
			}
			if err := w.Close(); err != nil {
				return err
			}
		}
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("write fonts", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// ExtractFontsFile dumps embedded fontfiles from inFile into outDir for
// selected pages.
func ExtractFontsFile(inFile, outDir string, selectedPages []string, conf *pdfcpu.Configuration) error {
	in, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer in.Close()
	log.CLI.Printf("extracting fonts from %s into %s/ ...\n", inFile, outDir)
	return ExtractFonts(in, outDir, filepath.Base(inFile), selectedPages, conf)
}
// ExtractPages generates single-page PDF files from rs in outDir for
// selected pages. Output files are named <fileName>_page_<page>.pdf.
func ExtractPages(rs io.ReadSeeker, outDir, fileName string, selectedPages []string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: ExtractPages: Please provide rs")
	}
	if conf == nil {
		// NOTE: Cmd is only set when the configuration is defaulted here; a
		// caller-supplied conf keeps whatever Cmd it already carries.
		conf = pdfcpu.NewDefaultConfiguration()
		conf.Cmd = pdfcpu.EXTRACTPAGES
	}

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	writeStart := time.Now()
	selection, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}

	// Base name for output files, stripped of any .pdf suffix.
	baseName := strings.TrimSuffix(filepath.Base(fileName), ".pdf")
	for pageNr, selected := range selection {
		if !selected {
			continue
		}
		pageCtx, err := ctx.ExtractPage(pageNr)
		if err != nil {
			return err
		}
		outFile := filepath.Join(outDir, fmt.Sprintf("%s_page_%d.pdf", baseName, pageNr))
		log.CLI.Printf("writing %s\n", outFile)
		if err := WriteContextFile(pageCtx, outFile); err != nil {
			return err
		}
	}

	durWrite := time.Since(writeStart).Seconds()
	durTotal := time.Since(start).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("write PDFs", durRead, durVal, durOpt, durWrite, durTotal)

	return nil
}
// ExtractPagesFile generates single page PDF files from inFile in outDir for selected pages.
func ExtractPagesFile(inFile, outDir string, selectedPages []string, conf *pdfcpu.Configuration) error {
	in, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer in.Close()
	log.CLI.Printf("extracting pages from %s into %s/ ...\n", inFile, outDir)
	return ExtractPages(in, outDir, filepath.Base(inFile), selectedPages, conf)
}
// ExtractContent dumps "PDF source" files from rs into outDir for selected pages.
// For each selected page the raw content stream is written to
// "<fileName>_Content_page_<n>.txt"; pages without content are skipped.
func ExtractContent(rs io.ReadSeeker, outDir, fileName string, selectedPages []string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: ExtractContent: Please provide rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}
	fromWrite := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	fileName = strings.TrimSuffix(filepath.Base(fileName), ".pdf")
	for p, v := range pages {
		if !v {
			continue
		}
		r, err := ctx.ExtractPageContent(p)
		if err != nil {
			return err
		}
		// A nil reader means this page has no content stream.
		if r == nil {
			continue
		}
		outFile := filepath.Join(outDir, fmt.Sprintf("%s_Content_page_%d.txt", fileName, p))
		log.CLI.Printf("writing %s\n", outFile)
		f, err := os.Create(outFile)
		if err != nil {
			return err
		}
		if _, err = io.Copy(f, r); err != nil {
			return err
		}
		if err := f.Close(); err != nil {
			return err
		}
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("write content", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// ExtractContentFile dumps "PDF source" files from inFile into outDir for selected pages.
func ExtractContentFile(inFile, outDir string, selectedPages []string, conf *pdfcpu.Configuration) error {
	f, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer f.Close()
	log.CLI.Printf("extracting content from %s into %s/ ...\n", inFile, outDir)
	// Pass only the base name, consistent with the other *File wrappers
	// (ExtractFontsFile, ExtractPagesFile, ExtractMetadataFile); the callee
	// derives the output-file prefix from it.
	return ExtractContent(f, outDir, filepath.Base(inFile), selectedPages, conf)
}
// ExtractMetadata dumps all metadata dict entries for rs into outDir.
// Each metadata stream is written to its own
// "<fileName>_Metadata_<parentType>_<parentObjNr>_<objNr>.txt" file.
func ExtractMetadata(rs io.ReadSeeker, outDir, fileName string, conf *pdfcpu.Configuration) error {
	if rs == nil {
		return errors.New("pdfcpu: ExtractMetadata: Please provide rs")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	fromWrite := time.Now()
	mm, err := ctx.ExtractMetadata()
	if err != nil {
		return err
	}
	if len(mm) > 0 {
		fileName = strings.TrimSuffix(filepath.Base(fileName), ".pdf")
		for _, m := range mm {
			outFile := filepath.Join(outDir, fmt.Sprintf("%s_Metadata_%s_%d_%d.txt", fileName, m.ParentType, m.ParentObjNr, m.ObjNr))
			log.CLI.Printf("writing %s\n", outFile)
			f, err := os.Create(outFile)
			if err != nil {
				return err
			}
			// m is readable; stream the metadata straight into the file.
			if _, err = io.Copy(f, m); err != nil {
				return err
			}
			if err := f.Close(); err != nil {
				return err
			}
		}
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("write metadata", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// ExtractMetadataFile dumps all metadata dict entries for inFile into outDir.
func ExtractMetadataFile(inFile, outDir string, conf *pdfcpu.Configuration) error {
	in, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer in.Close()
	log.CLI.Printf("extracting metadata from %s into %s/ ...\n", inFile, outDir)
	return ExtractMetadata(in, outDir, filepath.Base(inFile), conf)
}

235
vendor/github.com/pdfcpu/pdfcpu/pkg/api/fonts.go generated vendored Normal file
View File

@ -0,0 +1,235 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"bytes"
"fmt"
"path/filepath"
"sort"
"strings"
"unicode/utf8"
"github.com/pdfcpu/pdfcpu/pkg/font"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
pdf "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// isSupportedFontFile reports whether filename has the ".gob" extension
// (case-insensitively), the serialized font format pdfcpu supports.
func isSupportedFontFile(filename string) bool {
	return strings.EqualFold(filepath.Ext(filename), ".gob")
}
// ListFonts returns a list of supported fonts: the PDF core fonts first,
// then the user fonts installed in pdfcpu's config dir, each group sorted
// and preceded by a heading line.
func ListFonts() ([]string, error) {
	indent := func(ss []string) {
		for i, s := range ss {
			ss[i] = " " + s
		}
	}

	core := font.CoreFontNames()
	indent(core)
	sort.Strings(core)

	user := font.UserFontNamesVerbose()
	indent(user)
	sort.Strings(user)

	out := append([]string{"Corefonts:"}, core...)
	out = append(out, "")
	out = append(out, fmt.Sprintf("Userfonts(%s):", font.UserFontDir))
	out = append(out, user...)
	return out, nil
}
// InstallFonts installs TrueType fonts (.ttf) and TrueType collections
// (.ttc) into the user font dir for embedding; other extensions are
// silently skipped. Per-file failures are logged, not fatal.
func InstallFonts(fileNames []string) error {
	log.CLI.Printf("installing to %s...", font.UserFontDir)
	for _, fileName := range fileNames {
		var installErr error
		switch filepath.Ext(fileName) {
		case ".ttf":
			installErr = font.InstallTrueTypeFont(font.UserFontDir, fileName)
		case ".ttc":
			installErr = font.InstallTrueTypeCollection(font.UserFontDir, fileName)
		default:
			continue
		}
		if installErr != nil {
			log.CLI.Printf("%v", installErr)
		}
	}
	// Reload so the freshly installed fonts become visible.
	return font.LoadUserFonts()
}
// rowLabel renders the hex label for row i along the left or right margin
// of the demo grid. td is passed by value, so mutations stay local.
func rowLabel(i int, td pdf.TextDescriptor, baseFontName, baseFontKey string, buf *bytes.Buffer, mb *pdf.Rectangle, left bool) {
	x := 7750.
	if left {
		x = 39
	}
	td.Text = fmt.Sprintf("#%02X", i)
	td.X = x
	td.Y = float64(7677 - i*30)
	td.StrokeCol = pdf.Black
	td.FillCol = pdf.SimpleColor{B: .8}
	td.FontName = baseFontName
	td.FontKey = baseFontKey
	td.FontSize = 14
	pdf.WriteMultiLine(buf, mb, nil, td)
}
// columnsLabel renders the 256 hex column labels across the top or bottom
// edge of the demo grid.
func columnsLabel(td pdf.TextDescriptor, baseFontName, baseFontKey string, buf *bytes.Buffer, mb *pdf.Rectangle, top bool) {
	y := 0.
	if top {
		y = 7700
	}
	td.FontName = baseFontName
	td.FontKey = baseFontKey
	for col := 0; col < 256; col++ {
		td.Text = fmt.Sprintf("#%02X", col)
		td.X = float64(70 + col*30)
		td.Y = y
		td.FontSize = 14
		td.StrokeCol = pdf.Black
		td.FillCol = pdf.SimpleColor{B: .8}
		pdf.WriteMultiLine(buf, mb, nil, td)
	}
}
// surrogate reports whether r lies in the UTF-16 surrogate range
// (U+D800..U+DFFF), which cannot be encoded as UTF-8.
func surrogate(r rune) bool {
	switch {
	case r < 0xD800:
		return false
	case r > 0xDFFF:
		return false
	default:
		return true
	}
}
// writeUserFontDemoContent draws a 256x256 glyph grid for one Unicode
// plane of fontName onto page p: a title line, hex row/column labels on
// all four edges, and one cell per code point.
func writeUserFontDemoContent(p pdf.Page, fontName string, plane int) {
	baseFontName := "Helvetica" // labels/title use a core font, not the demo font
	baseFontSize := 24
	baseFontKey := p.Fm.EnsureKey(baseFontName)
	fontKey := p.Fm.EnsureKey(fontName)
	fontSize := 24
	fillCol := pdf.NewSimpleColor(0xf7e6c7)
	pdf.DrawGrid(p.Buf, 16*16, 16*16, pdf.RectForWidthAndHeight(55, 16, 16*480, 16*480), pdf.Black, &fillCol)
	td := pdf.TextDescriptor{
		FontName:       fontName,
		FontKey:        fontKey,
		FontSize:       baseFontSize,
		HAlign:         pdf.AlignCenter,
		VAlign:         pdf.AlignBaseline,
		Scale:          1.0,
		ScaleAbs:       true,
		RMode:          pdf.RMFill,
		StrokeCol:      pdf.Black,
		FillCol:        pdf.NewSimpleColor(0xab6f30),
		ShowBackground: true,
		BackgroundCol:  pdf.SimpleColor{R: 1., G: .98, B: .77},
	}
	// Title: plane's code point range, e.g. "(10000 - 1FFFF)" for plane 1.
	from := plane * 0x10000
	to := (plane+1)*0x10000 - 1
	s := fmt.Sprintf("%s %d points (%04X - %04X)", fontName, fontSize, from, to)
	td.X, td.Y, td.Text = p.MediaBox.Width()/2, 7750, s
	td.FontName, td.FontKey = baseFontName, baseFontKey
	td.StrokeCol, td.FillCol = pdf.NewSimpleColor(0x77bdbd), pdf.NewSimpleColor(0xab6f30)
	pdf.WriteMultiLine(p.Buf, p.MediaBox, nil, td)
	columnsLabel(td, baseFontName, baseFontKey, p.Buf, p.MediaBox, true)
	base := rune(plane * 0x10000)
	for j := 0; j < 256; j++ {
		rowLabel(j, td, baseFontName, baseFontKey, p.Buf, p.MediaBox, true)
		buf := make([]byte, 4)
		td.StrokeCol, td.FillCol = pdf.Black, pdf.Black
		td.FontName, td.FontKey, td.FontSize = fontName, fontKey, fontSize-2
		for i := 0; i < 256; i++ {
			r := base + rune(j*256+i)
			// Surrogates have no UTF-8 form; render a blank cell instead.
			s = " "
			if !surrogate(r) {
				n := utf8.EncodeRune(buf, r)
				s = string(buf[:n])
			}
			td.X, td.Y, td.Text = float64(70+i*30), float64(7672-j*30), s
			pdf.WriteMultiLine(p.Buf, p.MediaBox, nil, td)
		}
		rowLabel(j, td, baseFontName, baseFontKey, p.Buf, p.MediaBox, false)
	}
	columnsLabel(td, baseFontName, baseFontKey, p.Buf, p.MediaBox, false)
}
// createUserFontDemoPage builds a w x h demo page rendering every glyph
// of fontName within the given Unicode plane.
func createUserFontDemoPage(w, h, plane int, fontName string) pdf.Page {
	bg := pdf.NewSimpleColor(0xbeded9)
	page := pdf.NewPageWithBg(pdf.RectForDim(float64(w), float64(h)), bg)
	writeUserFontDemoContent(page, fontName, plane)
	return page
}
// planeString returns the conventional abbreviation for a Unicode plane
// number, or "" for planes without one.
func planeString(i int) string {
	abbrev := map[int]string{
		0:  "BMP",  // Basic Multilingual Plane
		1:  "SMP",  // Supplementary Multilingual Plane
		2:  "SIP",  // Supplementary Ideographic Plane
		3:  "TIP",  // Tertiary Ideographic Plane
		14: "SSP",  // Supplementary Special-purpose Plane
		15: "SPUA", // Supplementary Private Use Area Plane
	}
	return abbrev[i]
}
// CreateUserFontDemoFiles creates a single page PDF in dir for each
// Unicode plane that user font fn covers, named "<fn>_<plane>.pdf".
func CreateUserFontDemoFiles(dir, fn string) error {
	w, h := 7800, 7800
	ttf, ok := font.UserFontMetrics[fn]
	if !ok {
		return errors.Errorf("pdfcpu: font %s not available\n", fn)
	}
	// Create a single page PDF for each Unicode plane with existing glyphs.
	for i := range ttf.Planes {
		p := createUserFontDemoPage(w, h, i, fn)
		xRefTable, err := pdfcpu.CreateDemoXRef(p)
		if err != nil {
			return err
		}
		fileName := filepath.Join(dir, fn+"_"+planeString(i)+".pdf")
		if err := CreatePDFFile(xRefTable, fileName, nil); err != nil {
			return err
		}
	}
	return nil
}
// CreateCheatSheetsUserFonts creates single page PDF cheat sheets in the
// current directory for the given installed user fonts.
// An empty fontNames means all installed user fonts.
func CreateCheatSheetsUserFonts(fontNames []string) error {
	if len(fontNames) == 0 {
		fontNames = font.UserFontNames()
	}
	sort.Strings(fontNames)
	for _, name := range fontNames {
		if !font.IsUserFont(name) {
			// Unknown fonts are reported and skipped, not fatal.
			log.CLI.Printf("unknown user font: %s\n", name)
			continue
		}
		log.CLI.Println("creating cheatsheets for: " + name)
		if err := CreateUserFontDemoFiles(".", name); err != nil {
			return err
		}
	}
	return nil
}

170
vendor/github.com/pdfcpu/pdfcpu/pkg/api/importImage.go generated vendored Normal file
View File

@ -0,0 +1,170 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"bufio"
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// Import parses an Import command string s into an internal structure.
// u selects the display unit used to interpret dimension values in s.
func Import(s string, u pdfcpu.DisplayUnit) (*pdfcpu.Import, error) {
	imp, err := pdfcpu.ParseImportDetails(s, u)
	return imp, err
}
// ImportImages appends PDF pages containing images to rs and writes the result to w.
// If rs == nil a new PDF file will be written to w.
// Each reader in imgs contributes one page, appended in order.
func ImportImages(rs io.ReadSeeker, w io.Writer, imgs []io.Reader, imp *pdfcpu.Import, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.IMPORTIMAGES
	if imp == nil {
		imp = pdfcpu.DefaultImportConfig()
	}
	var (
		ctx *pdfcpu.Context
		err error
	)
	// Either extend an existing document or start from a fresh xref table.
	if rs != nil {
		ctx, _, _, err = readAndValidate(rs, conf, time.Now())
	} else {
		ctx, err = pdfcpu.CreateContextWithXRefTable(conf, imp.PageDim)
	}
	if err != nil {
		return err
	}
	pagesIndRef, err := ctx.Pages()
	if err != nil {
		return err
	}
	// This is the page tree root.
	pagesDict, err := ctx.DereferenceDict(*pagesIndRef)
	if err != nil {
		return err
	}
	for _, r := range imgs {
		// One new page per image, appended to the page tree root.
		indRef, err := pdfcpu.NewPageForImage(ctx.XRefTable, r, pagesIndRef, imp)
		if err != nil {
			return err
		}
		if err = pdfcpu.AppendPageTree(indRef, 1, pagesDict); err != nil {
			return err
		}
		ctx.PageCount++
	}
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	return nil
}
func fileExists(filename string) bool {
f, err := os.Open(filename)
defer f.Close()
return err == nil
}
// ImportImagesFile appends PDF pages containing images to outFile which will be created if necessary.
// If outFile already exists, the new pages are appended and the result is
// written via a ".tmp" file that replaces outFile on success.
func ImportImagesFile(imgFiles []string, outFile string, imp *pdfcpu.Import, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	rs := io.ReadSeeker(nil)
	f1 = nil
	tmpFile := outFile
	if fileExists(outFile) {
		// Append mode: read the existing file, write to a sibling tmp file.
		if f1, err = os.Open(outFile); err != nil {
			return err
		}
		rs = f1
		tmpFile += ".tmp"
		log.CLI.Printf("appending to %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", outFile)
	}
	// Open every image twice over: the raw closer and a buffered reader.
	rc := make([]io.ReadCloser, len(imgFiles))
	rr := make([]io.Reader, len(imgFiles))
	for i, fn := range imgFiles {
		f, err := os.Open(fn)
		if err != nil {
			return err
		}
		rc[i] = f
		rr[i] = bufio.NewReader(f)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// err is the named result: the deferred closure inspects the outcome of
	// ImportImages below and either cleans up (error path) or finalizes by
	// closing handles and renaming tmpFile over outFile (append mode).
	defer func() {
		if err != nil {
			f2.Close()
			if f1 != nil {
				f1.Close()
				os.Remove(tmpFile)
			}
			for _, f := range rc {
				f.Close()
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if f1 != nil {
			if err = f1.Close(); err != nil {
				return
			}
			if err = os.Rename(tmpFile, outFile); err != nil {
				return
			}
		}
		for _, f := range rc {
			if err := f.Close(); err != nil {
				return
			}
		}
	}()
	return ImportImages(rs, f2, rr, imp, conf)
}

60
vendor/github.com/pdfcpu/pdfcpu/pkg/api/info.go generated vendored Normal file
View File

@ -0,0 +1,60 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// Info returns information about rs, one digest line per entry,
// restricted to selectedPages where page-specific.
func Info(rs io.ReadSeeker, selectedPages []string, conf *pdfcpu.Configuration) ([]string, error) {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdfcpu.ValidationRelaxed
	}
	ctx, _, _, err := readAndValidate(rs, conf, time.Now())
	if err != nil {
		return nil, err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return nil, err
	}
	// NOTE(review): false presumably means an empty selection selects no
	// pages here (unlike the extract functions) — confirm against callee.
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, false)
	if err != nil {
		return nil, err
	}
	// Watermarks are detected up front so the digest can report them.
	if err := ctx.DetectWatermarks(); err != nil {
		return nil, err
	}
	return ctx.InfoDigest(pages)
}
// InfoFile returns information about inFile.
func InfoFile(inFile string, selectedPages []string, conf *pdfcpu.Configuration) ([]string, error) {
	in, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer in.Close()
	return Info(in, selectedPages, conf)
}

221
vendor/github.com/pdfcpu/pdfcpu/pkg/api/keywords.go generated vendored Normal file
View File

@ -0,0 +1,221 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
pdf "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// ListKeywords returns the keyword list of rs.
func ListKeywords(rs io.ReadSeeker, conf *pdf.Configuration) ([]string, error) {
	if conf == nil {
		conf = pdf.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdf.ValidationRelaxed
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return nil, err
	}
	fromWrite := time.Now()
	list, err := pdf.KeywordsList(ctx.XRefTable)
	if err != nil {
		return nil, err
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	// Label fixed: the original said "list files" — a copy/paste from the
	// attachment-listing code — which mislabeled this op in the stats output.
	pdf.TimingStats("list keywords", durRead, durVal, durOpt, durWrite, durTotal)
	return list, nil
}
// ListKeywordsFile returns the keyword list of inFile.
func ListKeywordsFile(inFile string, conf *pdf.Configuration) ([]string, error) {
	in, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer in.Close()
	return ListKeywords(in, conf)
}
// AddKeywords adds keywords (the entries of files) to the document info
// dict of the PDF read from rs and writes the result to w.
// (The parameter is named "files" but carries keyword strings.)
func AddKeywords(rs io.ReadSeeker, w io.Writer, files []string, conf *pdf.Configuration) error {
	if conf == nil {
		conf = pdf.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdf.ValidationRelaxed
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	from := time.Now()
	if err = pdf.KeywordsAdd(ctx.XRefTable, files); err != nil {
		return err
	}
	durAdd := time.Since(from).Seconds()
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := durAdd + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "add keyword, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// AddKeywordsFile adds keywords to the document info dict of inFile and
// writes the result to outFile. With outFile empty (or equal to inFile)
// the result is written via a ".tmp" file that replaces inFile on success.
func AddKeywordsFile(inFile, outFile string, files []string, conf *pdf.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// err is the named result: the deferred closure cleans up on failure
	// (removing the tmp file in overwrite mode) or finalizes on success,
	// renaming tmpFile over inFile when overwriting in place.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			if outFile == "" || inFile == outFile {
				os.Remove(tmpFile)
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return AddKeywords(f1, f2, files, conf)
}
// RemoveKeywords deletes keywords from the document info dict of the PDF
// read from rs and writes the result to w.
// An error is returned if none of the given keywords was present.
func RemoveKeywords(rs io.ReadSeeker, w io.Writer, keywords []string, conf *pdf.Configuration) error {
	if conf == nil {
		conf = pdf.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdf.ValidationRelaxed
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	from := time.Now()
	var ok bool
	if ok, err = pdf.KeywordsRemove(ctx.XRefTable, keywords); err != nil {
		return err
	}
	if !ok {
		return errors.New("no keyword removed")
	}
	durRemove := time.Since(from).Seconds()
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := durRemove + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	// Label fixed: the original said "remove att, write" — copy/pasted from
	// the attachments code — mislabeling this op in the stats output.
	logOperationStats(ctx, "remove keyword, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// RemoveKeywordsFile deletes keywords from the document info dict of
// inFile and writes the result to outFile. With outFile empty (or equal
// to inFile) the result is written via a ".tmp" file that replaces
// inFile on success.
func RemoveKeywordsFile(inFile, outFile string, keywords []string, conf *pdf.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// err is the named result: the deferred closure cleans up on failure
	// (removing the tmp file in overwrite mode) or finalizes on success.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			if outFile == "" || inFile == outFile {
				os.Remove(tmpFile)
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return RemoveKeywords(f1, f2, keywords, conf)
}

198
vendor/github.com/pdfcpu/pdfcpu/pkg/api/merge.go generated vendored Normal file
View File

@ -0,0 +1,198 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// appendTo reads a source PDF from rs and merges its page tree into ctxDest.
func appendTo(rs io.ReadSeeker, ctxDest *pdfcpu.Context) error {
	src, _, _, err := readAndValidate(rs, ctxDest.Configuration, time.Now())
	if err != nil {
		return err
	}
	// Fold the source xref table into the destination context.
	return pdfcpu.MergeXRefTables(src, ctxDest)
}
// ReadSeekerCloser combines io.ReadSeeker and io.Closer,
// e.g. for PDF inputs that must be both seekable and closable.
type ReadSeekerCloser interface {
	io.ReadSeeker
	io.Closer
}
// Merge merges a sequence of PDF streams and writes the result to w.
// rsc[0] is the destination context; all remaining streams are appended
// to it in order.
func Merge(rsc []io.ReadSeeker, w io.Writer, conf *pdfcpu.Configuration) error {
	// len check (not just nil): a non-nil empty slice would otherwise
	// panic on the rsc[0] access below.
	if len(rsc) == 0 {
		return errors.New("pdfcpu: Merge: Please provide rsc")
	}
	if w == nil {
		return errors.New("pdfcpu: Merge: Please provide w")
	}
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.MERGECREATE
	ctxDest, _, _, err := readAndValidate(rsc[0], conf, time.Now())
	if err != nil {
		return err
	}
	ctxDest.EnsureVersionForWriting()
	// Repeatedly merge files into fileDest's xref table.
	for _, f := range rsc[1:] {
		if err = appendTo(f, ctxDest); err != nil {
			return err
		}
	}
	if err = OptimizeContext(ctxDest); err != nil {
		return err
	}
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctxDest); err != nil {
			return err
		}
	}
	return WriteContext(ctxDest, w)
}
// MergeCreateFile merges a sequence of inFiles and writes the result to outFile.
// This operation corresponds to file concatenation in the order specified by inFiles.
// The first entry of inFiles serves as the destination context where all remaining files get merged into.
//
// The result is named (err error): the original unnamed result meant the
// deferred closure read a stale local err (so its error branch never ran
// and Close failures could not propagate), and the branch also lacked a
// return, double-closing the files and masking the real error.
func MergeCreateFile(inFiles []string, outFile string, conf *pdfcpu.Configuration) (err error) {
	ff := []*os.File(nil)
	for _, f := range inFiles {
		log.CLI.Println(f)
		f, err := os.Open(f)
		if err != nil {
			return err
		}
		ff = append(ff, f)
	}
	f, err := os.Create(outFile)
	if err != nil {
		return err
	}
	defer func() {
		if err != nil {
			// Merge failed: close everything best-effort and keep err.
			f.Close()
			for _, f := range ff {
				f.Close()
			}
			return
		}
		// Success: propagate any Close failure through the named result.
		if err = f.Close(); err != nil {
			return
		}
		for _, f := range ff {
			if err = f.Close(); err != nil {
				return
			}
		}
	}()
	rs := make([]io.ReadSeeker, len(ff))
	for i, f := range ff {
		rs[i] = f
	}
	log.CLI.Printf("writing %s...\n", outFile)
	return Merge(rs, f, conf)
}
func prepareReadSeekers(ff []*os.File) []io.ReadSeeker {
rss := make([]io.ReadSeeker, len(ff))
for i, f := range ff {
rss[i] = f
}
return rss
}
// MergeAppendFile merges a sequence of inFiles and writes the result to outFile.
// This operation corresponds to file concatenation in the order specified by inFiles.
// If outFile already exists, inFiles will be appended: outFile itself then
// becomes the first merge source and the result is written via a ".tmp"
// file that replaces outFile on success.
func MergeAppendFile(inFiles []string, outFile string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	tmpFile := outFile
	if fileExists(outFile) {
		if f1, err = os.Open(outFile); err != nil {
			return err
		}
		tmpFile += ".tmp"
		log.CLI.Printf("appending to %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", outFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// ff collects every merge source; in append mode the existing outFile
	// (f1) comes first so the new files are concatenated after it.
	ff := []*os.File(nil)
	if f1 != nil {
		ff = append(ff, f1)
	}
	for _, f := range inFiles {
		log.CLI.Println(f)
		f, err := os.Open(f)
		if err != nil {
			return err
		}
		ff = append(ff, f)
	}
	// err is the named result: the deferred closure cleans up on failure
	// (removing the tmp file in append mode) or finalizes on success.
	defer func() {
		if err != nil {
			f2.Close()
			if f1 != nil {
				os.Remove(tmpFile)
			}
			for _, f := range ff {
				f.Close()
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if f1 != nil {
			if err = os.Rename(tmpFile, outFile); err != nil {
				return
			}
		}
		for _, f := range ff {
			if err = f.Close(); err != nil {
				return
			}
		}
	}()
	return Merge(prepareReadSeekers(ff), f2, conf)
}

175
vendor/github.com/pdfcpu/pdfcpu/pkg/api/nup.go generated vendored Normal file
View File

@ -0,0 +1,175 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// PDFNUp returns an NUp configuration for Nup-ing PDF files.
// val is the grid size (pages per sheet); desc holds optional details.
func PDFNUp(val int, desc string) (*pdfcpu.NUp, error) {
	return pdfcpu.PDFNUpConfig(val, desc)
}
// ImageNUp returns an NUp configuration for Nup-ing image files.
// val is the grid size (images per sheet); desc holds optional details.
func ImageNUp(val int, desc string) (*pdfcpu.NUp, error) {
	return pdfcpu.ImageNUpConfig(val, desc)
}
// PDFGrid returns a rows x cols grid configuration for Nup-ing PDF files.
func PDFGrid(rows, cols int, desc string) (*pdfcpu.NUp, error) {
	return pdfcpu.PDFGridConfig(rows, cols, desc)
}
// ImageGrid returns a rows x cols grid configuration for Nup-ing image files.
func ImageGrid(rows, cols int, desc string) (*pdfcpu.NUp, error) {
	return pdfcpu.ImageGridConfig(rows, cols, desc)
}
// NUpFromImage creates a single page n-up PDF for one image
// or a sequence of n-up pages for more than one image.
func NUpFromImage(conf *pdfcpu.Configuration, imageFileNames []string, nup *pdfcpu.NUp) (*pdfcpu.Context, error) {
	if nup.PageDim == nil {
		// Set default paper size.
		nup.PageDim = pdfcpu.PaperSize[nup.PageSize]
	}
	ctx, err := pdfcpu.CreateContextWithXRefTable(conf, nup.PageDim)
	if err != nil {
		return nil, err
	}
	pagesIndRef, err := ctx.Pages()
	if err != nil {
		return nil, err
	}
	// This is the page tree root.
	pagesDict, err := ctx.DereferenceDict(*pagesIndRef)
	if err != nil {
		return nil, err
	}
	// One image: single n-up page; several: one n-up page per grid-full.
	if len(imageFileNames) == 1 {
		err = pdfcpu.NUpFromOneImage(ctx, imageFileNames[0], nup, pagesDict, pagesIndRef)
	} else {
		err = pdfcpu.NUpFromMultipleImages(ctx, imageFileNames, nup, pagesDict, pagesIndRef)
	}
	return ctx, err
}
// NUp rearranges PDF pages or images into page grids and writes the result to w.
// Either rs or imgFiles will be used: nup.ImgInputFile selects image input.
func NUp(rs io.ReadSeeker, w io.Writer, imgFiles, selectedPages []string, nup *pdfcpu.NUp, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.NUP
	log.Info.Printf("%s", nup)
	var (
		ctx *pdfcpu.Context
		err error
	)
	if nup.ImgInputFile {
		// Build the n-up document directly from the image files.
		if ctx, err = NUpFromImage(conf, imgFiles, nup); err != nil {
			return err
		}
	} else {
		// Rearrange the pages of the PDF read from rs.
		if ctx, _, _, err = readAndValidate(rs, conf, time.Now()); err != nil {
			return err
		}
		if err := ctx.EnsurePageCount(); err != nil {
			return err
		}
		pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
		if err != nil {
			return err
		}
		// New pages get added to ctx while old pages get deleted.
		// This way we avoid migrating objects between contexts.
		if err = ctx.NUpFromPDF(pages, nup); err != nil {
			return err
		}
	}
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	return nil
}
// NUpFile rearranges PDF pages or images into page grids and writes the result to outFile.
// For PDF input only inFiles[0] is read; for image input all inFiles are used.
func NUpFile(inFiles []string, outFile string, selectedPages []string, nup *pdfcpu.NUp, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if !nup.ImgInputFile {
		// Nup from a PDF page.
		if f1, err = os.Open(inFiles[0]); err != nil {
			return err
		}
	}
	if f2, err = os.Create(outFile); err != nil {
		return err
	}
	log.CLI.Printf("writing %s...\n", outFile)
	// err is the named result: the deferred closure closes both handles and
	// propagates Close failures on the success path.
	defer func() {
		if err != nil {
			if f1 != nil {
				f1.Close()
			}
			f2.Close()
			return
		}
		if f1 != nil {
			if err = f1.Close(); err != nil {
				return
			}
		}
		err = f2.Close()
		return
	}()
	return NUp(f1, f2, inFiles, selectedPages, nup, conf)
}

108
vendor/github.com/pdfcpu/pdfcpu/pkg/api/optimize.go generated vendored Normal file
View File

@ -0,0 +1,108 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// Optimize reads a PDF stream from rs and writes the optimized PDF stream to w.
func Optimize(rs io.ReadSeeker, w io.Writer, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
		conf.Cmd = pdfcpu.OPTIMIZE
	}
	fromStart := time.Now()
	// Optimization happens as part of this read/validate/optimize pipeline.
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "write", durRead, durVal, durOpt, durWrite, durTotal)
	// For Optimize only.
	if ctx.StatsFileName != "" {
		err = pdfcpu.AppendStatsFile(ctx)
		if err != nil {
			return errors.Wrap(err, "Write stats failed.")
		}
	}
	return nil
}
// OptimizeFile reads inFile and writes the optimized PDF to outFile.
// If outFile is not provided then inFile gets overwritten
// which leads to the same result as when inFile equals outFile.
// The result is written via a ".tmp" file that replaces inFile on success.
func OptimizeFile(inFile, outFile string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// err is the named result: the deferred closure removes the tmp file on
	// failure or finalizes on success, renaming it when overwriting in place.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return Optimize(f1, f2, conf)
}

253
vendor/github.com/pdfcpu/pdfcpu/pkg/api/pages.go generated vendored Normal file
View File

@ -0,0 +1,253 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// InsertPages inserts a blank page before or after every page selected of rs and writes the result to w.
func InsertPages(rs io.ReadSeeker, w io.Writer, selectedPages []string, before bool, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	// Default to inserting after the selected pages; switch on request.
	if before {
		conf.Cmd = pdfcpu.INSERTPAGESBEFORE
	} else {
		conf.Cmd = pdfcpu.INSERTPAGESAFTER
	}

	ctx, _, _, _, err := readValidateAndOptimize(rs, conf, time.Now())
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	// An empty selection means: every page.
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err = ctx.InsertBlankPages(pages, before); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)

	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	return nil
}
// InsertPagesFile inserts a blank page before or after every inFile page selected and writes the result to w.
func InsertPagesFile(inFile, outFile string, selectedPages []string, before bool, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File

	if f1, err = os.Open(inFile); err != nil {
		return err
	}

	// Write to a temp file when overwriting in place; a distinct outFile
	// is written directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}

	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}

	// On failure: close both files and remove the temp/output file.
	// On success: close both files, then rename the temp file onto inFile
	// when overwriting in place. err is the named return value.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()

	return InsertPages(f1, f2, selectedPages, before, conf)
}
// RemovePages removes selected pages from rs and writes the result to w.
func RemovePages(rs io.ReadSeeker, w io.Writer, selectedPages []string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.REMOVEPAGES

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	writeStart := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, false)
	if err != nil {
		return err
	}
	// Removing every page (or more) would leave an empty, invalid document.
	// ctx.PageCount gets set during validation.
	if len(pages) >= ctx.PageCount {
		return errors.New("pdfcpu: operation invalid")
	}

	// No special context processing required:
	// WriteContext decides which pages get written by checking conf.Cmd.
	ctx.Write.SelectedPages = pages
	if err = WriteContext(ctx, w); err != nil {
		return err
	}

	durWrite := time.Since(writeStart).Seconds()
	durTotal := time.Since(start).Seconds()
	logOperationStats(ctx, "remove pages, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// RemovePagesFile removes selected inFile pages and writes the result to outFile..
func RemovePagesFile(inFile, outFile string, selectedPages []string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File

	if f1, err = os.Open(inFile); err != nil {
		return err
	}

	// Write to a temp file when overwriting in place; a distinct outFile
	// is written directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}

	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}

	// On failure: close both files and remove the temp/output file.
	// On success: close both files, then rename the temp file onto inFile
	// when overwriting in place. err is the named return value.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()

	return RemovePages(f1, f2, selectedPages, conf)
}
// PageCount returns rs's page count.
func PageCount(rs io.ReadSeeker, conf *pdfcpu.Configuration) (int, error) {
	ctx, err := ReadContext(rs, conf)
	if err != nil {
		return 0, err
	}
	// Validation populates ctx.PageCount.
	if err = ValidateContext(ctx); err != nil {
		return 0, err
	}
	return ctx.PageCount, nil
}
// PageCountFile returns inFile's page count.
func PageCountFile(inFile string) (int, error) {
	file, err := os.Open(inFile)
	if err != nil {
		return 0, err
	}
	defer file.Close()
	return PageCount(file, pdfcpu.NewDefaultConfiguration())
}
// PageDims returns a sorted slice of mediaBox dimensions for rs.
func PageDims(rs io.ReadSeeker, conf *pdfcpu.Configuration) ([]pdfcpu.Dim, error) {
	ctx, err := ReadContext(rs, conf)
	if err != nil {
		return nil, err
	}
	dims, err := ctx.PageDims()
	if err != nil {
		return nil, err
	}
	// Sanity check: exactly one dimension entry per page.
	if len(dims) != ctx.PageCount {
		return nil, errors.New("pdfcpu: corrupt page dimensions")
	}
	return dims, nil
}
// PageDimsFile returns a sorted slice of mediaBox dimensions for inFile.
func PageDimsFile(inFile string) ([]pdfcpu.Dim, error) {
	file, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	return PageDims(file, pdfcpu.NewDefaultConfiguration())
}

168
vendor/github.com/pdfcpu/pdfcpu/pkg/api/permissions.go generated vendored Normal file
View File

@ -0,0 +1,168 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// ListPermissions returns a list of user access permissions.
func ListPermissions(rs io.ReadSeeker, conf *pdfcpu.Configuration) ([]string, error) {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.LISTPERMISSIONS

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return nil, err
	}

	listStart := time.Now()
	perms := pdfcpu.Permissions(ctx)
	durList := time.Since(listStart).Seconds()
	durTotal := time.Since(start).Seconds()

	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.TimingStats("list permissions", durRead, durVal, durOpt, durList, durTotal)
	return perms, nil
}
// ListPermissionsFile returns a list of user access permissions for inFile.
func ListPermissionsFile(inFile string, conf *pdfcpu.Configuration) ([]string, error) {
	file, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	return ListPermissions(file, conf)
}
// SetPermissions sets user access permissions.
// inFile has to be encrypted.
// A configuration containing the current passwords is required.
func SetPermissions(rs io.ReadSeeker, w io.Writer, conf *pdfcpu.Configuration) error {
	// The passwords needed for decryption come from conf, so it is mandatory.
	if conf == nil {
		return errors.New("pdfcpu: missing configuration for setting permissions")
	}
	conf.Cmd = pdfcpu.SETPERMISSIONS

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}

	writeStart := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := time.Since(writeStart).Seconds()
	durTotal := time.Since(start).Seconds()
	logOperationStats(ctx, "write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// SetPermissionsFile sets inFile's user access permissions.
// inFile has to be encrypted.
// A configuration containing the current passwords is required.
func SetPermissionsFile(inFile, outFile string, conf *pdfcpu.Configuration) (err error) {
	// The passwords needed for decryption come from conf, so it is mandatory.
	if conf == nil {
		return errors.New("pdfcpu: missing configuration for setting permissions")
	}

	var f1, f2 *os.File

	if f1, err = os.Open(inFile); err != nil {
		return err
	}

	// Write to a temp file when overwriting in place; a distinct outFile
	// is written directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}

	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}

	// On failure: close both files and remove the temp/output file.
	// On success: close both files, then rename the temp file onto inFile
	// when overwriting in place. err is the named return value.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()

	return SetPermissions(f1, f2, conf)
}
// GetPermissions returns the permissions for rs.
// A nil result means the document is not encrypted: full access applies.
func GetPermissions(rs io.ReadSeeker, conf *pdfcpu.Configuration) (*int16, error) {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	ctx, _, _, err := readAndValidate(rs, conf, time.Now())
	if err != nil {
		return nil, err
	}
	// Full access - permissions don't apply.
	if ctx.E == nil {
		return nil, nil
	}
	perms := int16(ctx.E.P)
	return &perms, nil
}
// GetPermissionsFile returns the permissions for inFile.
func GetPermissionsFile(inFile string, conf *pdfcpu.Configuration) (*int16, error) {
	file, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	return GetPermissions(file, conf)
}

221
vendor/github.com/pdfcpu/pdfcpu/pkg/api/properties.go generated vendored Normal file
View File

@ -0,0 +1,221 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
pdf "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// ListProperties returns the property list of rs.
func ListProperties(rs io.ReadSeeker, conf *pdf.Configuration) ([]string, error) {
	if conf == nil {
		conf = pdf.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdf.ValidationRelaxed
	}

	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return nil, err
	}

	fromList := time.Now()
	list, err := pdf.PropertiesList(ctx.XRefTable)
	if err != nil {
		return nil, err
	}
	durList := time.Since(fromList).Seconds()
	durTotal := time.Since(fromStart).Seconds()

	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	// Label fixed: this operation lists properties, not files
	// (previous label "list files" was copy-pasted from the attachments API).
	pdf.TimingStats("list properties", durRead, durVal, durOpt, durList, durTotal)
	return list, nil
}
// ListPropertiesFile returns the property list of inFile.
func ListPropertiesFile(inFile string, conf *pdf.Configuration) ([]string, error) {
	file, err := os.Open(inFile)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	return ListProperties(file, conf)
}
// AddProperties adds document properties to a PDF context read from rs and writes the result to w.
// (Previous doc comment said "embeds files" — copy-pasted from the attachments API.)
func AddProperties(rs io.ReadSeeker, w io.Writer, properties map[string]string, conf *pdf.Configuration) error {
	if conf == nil {
		conf = pdf.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdf.ValidationRelaxed
	}

	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}

	from := time.Now()
	if err = pdf.PropertiesAdd(ctx.XRefTable, properties); err != nil {
		return err
	}
	durAdd := time.Since(from).Seconds()

	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := durAdd + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	// Label fixed: this operation adds properties, not keywords
	// (previous label "add keyword, write" was copy-pasted from the keywords API).
	logOperationStats(ctx, "add properties, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// AddPropertiesFile adds document properties to a PDF context read from inFile and writes the result to outFile.
func AddPropertiesFile(inFile, outFile string, properties map[string]string, conf *pdf.Configuration) (err error) {
	var f1, f2 *os.File

	if f1, err = os.Open(inFile); err != nil {
		return err
	}

	// Write to a temp file when overwriting in place; a distinct outFile
	// is written directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
	}

	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}

	// On failure: close both files; the temp file is removed only when
	// overwriting in place. NOTE(review): unlike the other *File helpers,
	// a failed run with a distinct outFile leaves the partial output behind —
	// confirm whether that is intentional.
	// On success: close both files, then rename the temp file onto inFile
	// when overwriting in place. err is the named return value.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			if outFile == "" || inFile == outFile {
				os.Remove(tmpFile)
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()

	return AddProperties(f1, f2, properties, conf)
}
// RemoveProperties removes document properties from a PDF context read from rs and writes the result to w.
func RemoveProperties(rs io.ReadSeeker, w io.Writer, properties []string, conf *pdf.Configuration) error {
	if conf == nil {
		conf = pdf.NewDefaultConfiguration()
	} else {
		// Validation loads infodict.
		conf.ValidationMode = pdf.ValidationRelaxed
	}

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}

	removeStart := time.Now()
	ok, err := pdf.PropertiesRemove(ctx.XRefTable, properties)
	if err != nil {
		return err
	}
	// Removing nothing at all is reported as an error.
	if !ok {
		return errors.New("no property removed")
	}
	durRemove := time.Since(removeStart).Seconds()

	writeStart := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := durRemove + time.Since(writeStart).Seconds()
	durTotal := time.Since(start).Seconds()
	logOperationStats(ctx, "remove prop, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// RemovePropertiesFile removes document properties from a PDF context read from inFile and writes the result to outFile.
func RemovePropertiesFile(inFile, outFile string, properties []string, conf *pdf.Configuration) (err error) {
	var f1, f2 *os.File

	if f1, err = os.Open(inFile); err != nil {
		return err
	}

	// Write to a temp file when overwriting in place; a distinct outFile
	// is written directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
	}

	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}

	// On failure: close both files; the temp file is removed only when
	// overwriting in place. NOTE(review): a failed run with a distinct
	// outFile leaves the partial output behind — confirm intent.
	// On success: close both files, then rename the temp file onto inFile
	// when overwriting in place. err is the named return value.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			if outFile == "" || inFile == outFile {
				os.Remove(tmpFile)
			}
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()

	return RemoveProperties(f1, f2, properties, conf)
}

116
vendor/github.com/pdfcpu/pdfcpu/pkg/api/rotate.go generated vendored Normal file
View File

@ -0,0 +1,116 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// Rotate rotates selected pages of rs clockwise by rotation degrees and writes the result to w.
func Rotate(rs io.ReadSeeker, w io.Writer, rotation int, selectedPages []string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.ROTATE

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	rotStart := time.Now()
	// An empty selection means: rotate every page.
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err = pdfcpu.RotatePages(ctx, pages, rotation); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	durRotate := time.Since(rotStart).Seconds()

	writeStart := time.Now()
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := durRotate + time.Since(writeStart).Seconds()
	durTotal := time.Since(start).Seconds()
	logOperationStats(ctx, "rotate, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// RotateFile rotates selected pages of inFile clockwise by rotation degrees and writes the result to outFile.
func RotateFile(inFile, outFile string, rotation int, selectedPages []string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File

	if f1, err = os.Open(inFile); err != nil {
		return err
	}

	// Write to a temp file when overwriting in place; a distinct outFile
	// is written directly.
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}

	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}

	// On failure: close both files and remove the temp/output file.
	// On success: close both files, then rename the temp file onto inFile
	// when overwriting in place. err is the named return value.
	defer func() {
		if err != nil {
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()

	return Rotate(f1, f2, rotation, selectedPages, conf)
}

651
vendor/github.com/pdfcpu/pdfcpu/pkg/api/selectPages.go generated vendored Normal file
View File

@ -0,0 +1,651 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"fmt"
"regexp"
"sort"
"strconv"
"strings"
"github.com/pdfcpu/pdfcpu/pkg/log"
pdf "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
var (
selectedPagesRegExp *regexp.Regexp
)
func setupRegExpForPageSelection() *regexp.Regexp {
e := "(\\d+)?-l(-\\d+)?|l(-(\\d+)-?)?"
e = "[!n]?((-\\d+)|(\\d+(-(\\d+)?)?)|" + e + ")"
e = "\\Qeven\\E|\\Qodd\\E|" + e
exp := "^" + e + "(," + e + ")*$"
re, _ := regexp.Compile(exp)
return re
}
func init() {
selectedPagesRegExp = setupRegExpForPageSelection()
}
// ParsePageSelection ensures a correct page selection expression.
//
// A valid expression is a comma separated list of:
// {even|odd}, {!|n}#, {!|n}#-{#}, and 'l' (last page) forms.
// '!' negates an expression; since '!' needs quoting in bash,
// 'n' works as an alternative (e.g. "1-,!4" or "1-,n4").
// The selection is evaluated strictly left to right:
// "!3,1-5" extracts pages 1-5 whereas "1-5,!3" extracts 1,2,4,5.
func ParsePageSelection(s string) ([]string, error) {
	if s == "" {
		return nil, nil
	}
	if !selectedPagesRegExp.MatchString(s) {
		return nil, errors.Errorf("-pages \"%s\" => syntax error\n", s)
	}
	//log.CLI.Printf("pageSelection: %s\n", s)
	return strings.Split(s, ","), nil
}
// handlePrefix applies a "-…" expression: -l, -l-#, or -#.
func handlePrefix(v string, negated bool, pageCount int, selectedPages pdf.IntSet) error {
	// -l ... all pages.
	if v == "l" {
		for p := 1; p <= pageCount; p++ {
			selectedPages[p] = !negated
		}
		return nil
	}

	// -l-# ... all pages except the last # pages.
	if strings.HasPrefix(v, "l-") {
		n, err := strconv.Atoi(v[2:])
		if err != nil {
			return err
		}
		if pageCount-n < 1 {
			return nil
		}
		for p := 1; p <= pageCount-n; p++ {
			selectedPages[p] = !negated
		}
		return nil
	}

	// -# ... (de)select all pages up to and including #.
	n, err := strconv.Atoi(v)
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if n > pageCount {
		n = pageCount
	}
	for p := 1; p <= n; p++ {
		selectedPages[p] = !negated
	}
	return nil
}
// handleSuffix applies a "#-" expression: (de)select all pages from # through the end.
func handleSuffix(v string, negated bool, pageCount int, selectedPages pdf.IntSet) error {
	n, err := strconv.Atoi(v)
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if n > pageCount {
		return nil
	}
	for p := n; p <= pageCount; p++ {
		selectedPages[p] = !negated
	}
	return nil
}
// handleSpecificPageOrLastXPages applies "l", "l-#", "l-#-" and plain "#" expressions.
func handleSpecificPageOrLastXPages(s string, negated bool, pageCount int, selectedPages pdf.IntSet) error {
	// l ... the last page.
	if s == "l" {
		selectedPages[pageCount] = !negated
		return nil
	}

	// l-# ... the #-th page from the end; l-#- ... from there through the last page.
	if strings.HasPrefix(s, "l-") {
		pr := strings.Split(s[2:], "-")
		n, err := strconv.Atoi(pr[0])
		if err != nil {
			return err
		}
		if pageCount-n < 1 {
			return nil
		}
		from := pageCount - n
		thru := from
		if strings.HasSuffix(s, "-") {
			thru = pageCount
		}
		for p := from; p <= thru; p++ {
			selectedPages[p] = !negated
		}
		return nil
	}

	// # ... (de)select a single specific page.
	n, err := strconv.Atoi(s)
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if n > pageCount {
		return nil
	}
	selectedPages[n] = !negated
	return nil
}
// negation reports whether c marks a negated expression:
// '!' or its shell-friendly alias 'n'.
func negation(c byte) bool {
	switch c {
	case '!', 'n':
		return true
	}
	return false
}
// selectEvenPages marks all even page numbers up to pageCount as selected
// unless the page already has an entry (earlier expressions take precedence).
func selectEvenPages(selectedPages pdf.IntSet, pageCount int) {
	for p := 2; p <= pageCount; p += 2 {
		if _, seen := selectedPages[p]; !seen {
			selectedPages[p] = true
		}
	}
}
// selectOddPages marks all odd page numbers up to pageCount as selected
// unless the page already has an entry (earlier expressions take precedence).
func selectOddPages(selectedPages pdf.IntSet, pageCount int) {
	for p := 1; p <= pageCount; p += 2 {
		if _, seen := selectedPages[p]; !seen {
			selectedPages[p] = true
		}
	}
}
// parsePageRange applies "#-#", "#-l" and "#-l-#" range expressions.
func parsePageRange(pr []string, pageCount int, negated bool, selectedPages pdf.IntSet) error {
	from, err := strconv.Atoi(pr[0])
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if from > pageCount {
		return nil
	}

	var thru int
	if pr[1] != "l" {
		// #-#
		if thru, err = strconv.Atoi(pr[1]); err != nil {
			return err
		}
	} else {
		// #-l, optionally #-l-# (exclude the last # pages).
		thru = pageCount
		if len(pr) == 3 {
			n, err := strconv.Atoi(pr[2])
			if err != nil {
				return err
			}
			thru -= n
		}
	}

	// Handle empty/overflowing ranges gracefully.
	if thru < from {
		return nil
	}
	if thru > pageCount {
		thru = pageCount
	}
	for p := from; p <= thru; p++ {
		selectedPages[p] = !negated
	}
	return nil
}
// sortedPages returns the selected page numbers in ascending order.
func sortedPages(selectedPages pdf.IntSet) []int {
	var pages []int
	for p, selected := range selectedPages {
		if selected {
			pages = append(pages, p)
		}
	}
	sort.Ints(pages)
	return pages
}
// logSelPages prints the sorted, comma separated selected pages to the CLI logger.
func logSelPages(selectedPages pdf.IntSet) {
	if !log.IsCLILoggerEnabled() {
		return
	}
	nums := make([]string, 0, len(selectedPages))
	for _, p := range sortedPages(selectedPages) {
		nums = append(nums, strconv.Itoa(p))
	}
	log.CLI.Printf("pages: %s\n", strings.Join(nums, ","))
}
// selectedPages returns a set of used page numbers.
// key==page# => key 0 unused!
//
// Expressions are applied strictly left to right, so later (possibly
// negated) expressions override earlier ones.
func selectedPages(pageCount int, pageSelection []string) (pdf.IntSet, error) {
	selectedPages := pdf.IntSet{}
	for _, v := range pageSelection {
		//log.Stats.Printf("pageExp: <%s>\n", v)
		// even/odd select every second page without overriding earlier picks.
		if v == "even" {
			selectEvenPages(selectedPages, pageCount)
			continue
		}
		if v == "odd" {
			selectOddPages(selectedPages, pageCount)
			continue
		}
		// A leading '!' or 'n' negates the expression that follows.
		var negated bool
		if negation(v[0]) {
			negated = true
			//logInfoAPI.Printf("is a negated exp\n")
			v = v[1:]
		}
		// -# ... leading-dash forms (first # pages, -l, -l-#).
		if v[0] == '-' {
			v = v[1:]
			if err := handlePrefix(v, negated, pageCount, selectedPages); err != nil {
				return nil, err
			}
			continue
		}
		// #- ... pages # through the end.
		if v[0] != 'l' && strings.HasSuffix(v, "-") {
			if err := handleSuffix(v[:len(v)-1], negated, pageCount, selectedPages); err != nil {
				return nil, err
			}
			continue
		}
		// l l-# l-#- ... last-page relative forms.
		if v[0] == 'l' {
			if err := handleSpecificPageOrLastXPages(v, negated, pageCount, selectedPages); err != nil {
				return nil, err
			}
			continue
		}
		pr := strings.Split(v, "-")
		if len(pr) >= 2 {
			// v contains '-' somewhere in the middle
			// #-# #-l #-l-#
			if err := parsePageRange(pr, pageCount, negated, selectedPages); err != nil {
				return nil, err
			}
			continue
		}
		// # ... a single specific page.
		if err := handleSpecificPageOrLastXPages(pr[0], negated, pageCount, selectedPages); err != nil {
			return nil, err
		}
	}
	logSelPages(selectedPages)
	return selectedPages, nil
}
// PagesForPageSelection ensures a set of page numbers for an ascending page sequence
// where each page number may appear only once.
//
// With an empty selection, ensureAllforNone controls whether the full page
// set (true) or a nil set (false) is returned.
func PagesForPageSelection(pageCount int, pageSelection []string, ensureAllforNone bool) (pdf.IntSet, error) {
	// len() of a nil slice is 0, so the former separate nil check was
	// redundant (staticcheck S1009).
	if len(pageSelection) > 0 {
		return selectedPages(pageCount, pageSelection)
	}
	if !ensureAllforNone {
		//log.CLI.Printf("pages: none\n")
		return nil, nil
	}
	m := pdf.IntSet{}
	for i := 1; i <= pageCount; i++ {
		m[i] = true
	}
	log.CLI.Printf("pages: all\n")
	return m, nil
}
// deletePageFromCollection removes every occurrence of page p from *cp.
func deletePageFromCollection(cp *[]int, p int) {
	kept := []int{}
	for _, page := range *cp {
		if page != p {
			kept = append(kept, page)
		}
	}
	*cp = kept
}

// processPageForCollection appends page i to *cp, or removes every
// occurrence of it when the expression is negated.
func processPageForCollection(cp *[]int, negated bool, i int) {
	if negated {
		deletePageFromCollection(cp, i)
		return
	}
	*cp = append(*cp, i)
}
// collectEvenPages appends every even page number up to pageCount to *cp.
func collectEvenPages(cp *[]int, pageCount int) {
	for p := 2; p <= pageCount; p += 2 {
		*cp = append(*cp, p)
	}
}

// collectOddPages appends every odd page number up to pageCount to *cp.
func collectOddPages(cp *[]int, pageCount int) {
	for p := 1; p <= pageCount; p += 2 {
		*cp = append(*cp, p)
	}
}
// handlePrefixForCollection applies a "-…" expression (-l, -l-#, -#) to the collection.
func handlePrefixForCollection(v string, negated bool, pageCount int, cp *[]int) error {
	// -l ... all pages.
	if v == "l" {
		for p := 1; p <= pageCount; p++ {
			processPageForCollection(cp, negated, p)
		}
		return nil
	}

	// -l-# ... all pages except the last # pages.
	if strings.HasPrefix(v, "l-") {
		n, err := strconv.Atoi(v[2:])
		if err != nil {
			return err
		}
		if pageCount-n < 1 {
			return nil
		}
		for p := 1; p <= pageCount-n; p++ {
			processPageForCollection(cp, negated, p)
		}
		return nil
	}

	// -# ... all pages up to and including #.
	n, err := strconv.Atoi(v)
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if n > pageCount {
		n = pageCount
	}
	for p := 1; p <= n; p++ {
		processPageForCollection(cp, negated, p)
	}
	return nil
}
// handleSuffixForCollection applies a "#-" expression: all pages from # through the end.
func handleSuffixForCollection(v string, negated bool, pageCount int, cp *[]int) error {
	n, err := strconv.Atoi(v)
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if n > pageCount {
		return nil
	}
	for p := n; p <= pageCount; p++ {
		processPageForCollection(cp, negated, p)
	}
	return nil
}
// handleSpecificPageOrLastXPagesForCollection applies "l", "l-#", "l-#-" and plain "#" expressions.
func handleSpecificPageOrLastXPagesForCollection(s string, negated bool, pageCount int, cp *[]int) error {
	// l ... the last page.
	if s == "l" {
		processPageForCollection(cp, negated, pageCount)
		return nil
	}

	// l-# ... the #-th page from the end; l-#- ... from there through the last page.
	if strings.HasPrefix(s, "l-") {
		pr := strings.Split(s[2:], "-")
		n, err := strconv.Atoi(pr[0])
		if err != nil {
			return err
		}
		if pageCount-n < 1 {
			return nil
		}
		from := pageCount - n
		thru := from
		if strings.HasSuffix(s, "-") {
			thru = pageCount
		}
		for p := from; p <= thru; p++ {
			processPageForCollection(cp, negated, p)
		}
		return nil
	}

	// # ... a single specific page.
	n, err := strconv.Atoi(s)
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if n > pageCount {
		return nil
	}
	processPageForCollection(cp, negated, n)
	return nil
}
// parsePageRangeForCollection applies "#-#", "#-l" and "#-l-#" range expressions to the collection.
func parsePageRangeForCollection(pr []string, pageCount int, negated bool, cp *[]int) error {
	from, err := strconv.Atoi(pr[0])
	if err != nil {
		return err
	}
	// Handle overflow gracefully.
	if from > pageCount {
		return nil
	}

	var thru int
	if pr[1] != "l" {
		// #-#
		if thru, err = strconv.Atoi(pr[1]); err != nil {
			return err
		}
	} else {
		// #-l, optionally #-l-# (exclude the last # pages).
		thru = pageCount
		if len(pr) == 3 {
			n, err := strconv.Atoi(pr[2])
			if err != nil {
				return err
			}
			thru -= n
		}
	}

	// Handle empty/overflowing ranges gracefully.
	if thru < from {
		return nil
	}
	if thru > pageCount {
		thru = pageCount
	}
	for p := from; p <= thru; p++ {
		processPageForCollection(cp, negated, p)
	}
	return nil
}
// PagesForPageCollection returns a slice of page numbers for a page collection.
// Any page number in any order any number of times allowed.
//
// Expressions are applied strictly left to right; a negated expression
// removes pages collected by earlier ones.
func PagesForPageCollection(pageCount int, pageSelection []string) ([]int, error) {
	collectedPages := []int{}
	for _, v := range pageSelection {
		// even/odd append every second page.
		if v == "even" {
			collectEvenPages(&collectedPages, pageCount)
			continue
		}
		if v == "odd" {
			collectOddPages(&collectedPages, pageCount)
			continue
		}
		// A leading '!' or 'n' negates the expression that follows.
		var negated bool
		if negation(v[0]) {
			negated = true
			//logInfoAPI.Printf("is a negated exp\n")
			v = v[1:]
		}
		// -# ... leading-dash forms (first # pages, -l, -l-#).
		if v[0] == '-' {
			v = v[1:]
			if err := handlePrefixForCollection(v, negated, pageCount, &collectedPages); err != nil {
				return nil, err
			}
			continue
		}
		// #- ... pages # through the end.
		if v[0] != 'l' && strings.HasSuffix(v, "-") {
			if err := handleSuffixForCollection(v[:len(v)-1], negated, pageCount, &collectedPages); err != nil {
				return nil, err
			}
			continue
		}
		// l l-# l-#- ... last-page relative forms.
		if v[0] == 'l' {
			if err := handleSpecificPageOrLastXPagesForCollection(v, negated, pageCount, &collectedPages); err != nil {
				return nil, err
			}
			continue
		}
		pr := strings.Split(v, "-")
		if len(pr) >= 2 {
			// v contains '-' somewhere in the middle
			// #-# #-l #-l-#
			if err := parsePageRangeForCollection(pr, pageCount, negated, &collectedPages); err != nil {
				return nil, err
			}
			continue
		}
		// # ... a single specific page.
		if err := handleSpecificPageOrLastXPagesForCollection(pr[0], negated, pageCount, &collectedPages); err != nil {
			return nil, err
		}
	}
	return collectedPages, nil
}
// PagesForPageRange returns a slice of page numbers for a page range.
func PagesForPageRange(from, thru int) []int {
	pages := make([]int, 0, thru-from+1)
	for p := from; p <= thru; p++ {
		pages = append(pages, p)
	}
	return pages
}

186
vendor/github.com/pdfcpu/pdfcpu/pkg/api/split.go generated vendored Normal file
View File

@ -0,0 +1,186 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// spanFileName derives the output file name for the pages from..thru of
// fileName: "<base>_<from>.pdf" for a single page, "<base>_<from>-<thru>.pdf"
// otherwise.
func spanFileName(fileName string, from, thru int) string {
	base := strings.TrimSuffix(filepath.Base(fileName), ".pdf")
	var b strings.Builder
	b.WriteString(base)
	b.WriteString("_")
	b.WriteString(strconv.Itoa(from))
	if from != thru {
		b.WriteString("-")
		b.WriteString(strconv.Itoa(thru))
	}
	b.WriteString(".pdf")
	return b.String()
}
// writeSpan copies pages from..thru of ctx into a fresh context and writes
// it to outDir; the file name is fileName itself for bookmark-driven splits,
// otherwise it encodes the page span.
func writeSpan(ctx *pdfcpu.Context, from, thru int, outDir, fileName string, forBookmark bool) error {
	pages := PagesForPageRange(from, thru)

	destCtx, err := pdfcpu.CreateContextWithXRefTable(nil, pdfcpu.PaperSize["A4"])
	if err != nil {
		return err
	}
	// No page cache needed for a one-shot copy.
	if err := pdfcpu.AddPages(ctx, destCtx, pages, false); err != nil {
		return err
	}

	w := destCtx.Write
	w.DirName = outDir
	if forBookmark {
		w.FileName = fileName + ".pdf"
	} else {
		w.FileName = spanFileName(fileName, from, thru)
		//log.CLI.Printf("writing to: <%s>\n", w.FileName)
	}
	return pdfcpu.Write(destCtx)
}
// writePageSpan extracts pages from..thru of ctx into a new context and
// writes it as a file into outDir.
func writePageSpan(ctx *pdfcpu.Context, from, thru int, outDir, fileName string, forBookmark bool) error {
	// Create a context holding copies of the selected pages.
	ctxNew, err := ctx.ExtractPages(PagesForPageRange(from, thru), false)
	if err != nil {
		return err
	}
	// Bookmark-driven splits use the bookmark title as file name,
	// span splits encode the page range.
	name := fileName + ".pdf"
	if !forBookmark {
		name = spanFileName(fileName, from, thru)
	}
	return WriteContextFile(ctxNew, filepath.Join(outDir, name))
}
// writePageSpansSplitAlongBookmarks splits ctx into one file per level-1
// bookmark, using the bookmark title as the file name.
func writePageSpansSplitAlongBookmarks(ctx *pdfcpu.Context, outDir string) error {
	bms, err := ctx.BookmarksForOutlineLevel1()
	if err != nil {
		return err
	}

	for _, bm := range bms {
		thru := bm.PageThru
		if thru == 0 {
			// Open-ended bookmark: its span runs to the end of the document.
			thru = ctx.PageCount
		}
		if err := writePageSpan(ctx, bm.PageFrom, thru, outDir, bm.Title, true); err != nil {
			return err
		}
	}

	return nil
}
// writePageSpans splits ctx into files of span pages each.
// span == 0 requests a bookmark-driven split instead.
func writePageSpans(ctx *pdfcpu.Context, span int, outDir, fileName string) error {
	if span == 0 {
		return writePageSpansSplitAlongBookmarks(ctx, outDir)
	}

	// Emit all full spans first.
	for from := 1; from+span-1 <= ctx.PageCount; from += span {
		if err := writePageSpan(ctx, from, from+span-1, outDir, fileName, false); err != nil {
			return err
		}
	}

	// A possible last file has less than span pages.
	if rem := ctx.PageCount % span; rem > 0 {
		from := ctx.PageCount - rem + 1
		if err := writePageSpan(ctx, from, ctx.PageCount, outDir, fileName, false); err != nil {
			return err
		}
	}

	return nil
}
// Split generates a sequence of PDF files in outDir for the PDF stream read from rs obeying given split span.
// If span == 1 splitting results in single page PDFs.
// If span == 0 we split along given bookmarks (level 1 only).
// Default span: 1
func Split(rs io.ReadSeeker, outDir, fileName string, span int, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.SPLIT

	start := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, start)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}

	writeStart := time.Now()
	if err := writePageSpans(ctx, span, outDir, fileName); err != nil {
		return err
	}

	logOperationStats(ctx, "split", durRead, durVal, durOpt,
		time.Since(writeStart).Seconds(), time.Since(start).Seconds())
	return nil
}
// SplitFile generates a sequence of PDF files in outDir for inFile obeying given split span.
// If span == 1 splitting results in single page PDFs.
// If span == 0 we split along given bookmarks (level 1 only).
// Default span: 1
//
// err is a named result so the deferred cleanup can observe Split's error
// and surface a Close failure. The previous version used an unnamed return:
// its defer inspected a stale local err (Split's error was invisible to it)
// and the f.Close() error was silently discarded.
func SplitFile(inFile, outDir string, span int, conf *pdfcpu.Configuration) (err error) {
	var f *os.File
	if f, err = os.Open(inFile); err != nil {
		return err
	}
	log.CLI.Printf("splitting %s to %s/...\n", inFile, outDir)
	defer func() {
		if err != nil {
			// Split failed: close best-effort, keep the original error.
			f.Close()
			return
		}
		err = f.Close()
	}()
	return Split(f, outDir, filepath.Base(inFile), span, conf)
}

442
vendor/github.com/pdfcpu/pdfcpu/pkg/api/stamp.go generated vendored Normal file
View File

@ -0,0 +1,442 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// WatermarkContext applies wm for selected pages to ctx.
// Thin convenience wrapper around Context.AddWatermarks.
func WatermarkContext(ctx *pdfcpu.Context, selectedPages pdfcpu.IntSet, wm *pdfcpu.Watermark) error {
	return ctx.AddWatermarks(selectedPages, wm)
}
// AddWatermarksMap adds watermarks in m to corresponding pages in rs and writes the result to w.
// NOTE(review): m presumably maps page numbers to their watermark — confirm
// against Context.AddWatermarksMap.
func AddWatermarksMap(rs io.ReadSeeker, w io.Writer, m map[int]*pdfcpu.Watermark, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.ADDWATERMARKS
	if len(m) == 0 {
		return errors.New("pdfcpu: missing watermarks")
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	from := time.Now()
	if err = ctx.AddWatermarksMap(m); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	// Re-validate the modified context unless validation is disabled.
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	durStamp := time.Since(from).Seconds()
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	// The stamping duration is folded into the reported write duration.
	durWrite := durStamp + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "watermark, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// AddWatermarksMapFile adds watermarks to corresponding pages in m of inFile and writes the result to outFile.
// If outFile is empty or equals inFile, output goes to a ".tmp" sibling which
// then replaces inFile via rename on success.
func AddWatermarksMapFile(inFile, outFile string, m map[int]*pdfcpu.Watermark, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		// Distinct destination: write directly to outFile.
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// err is a named result: the defer can inspect the outcome of
	// AddWatermarksMap and propagate Close/Rename failures.
	defer func() {
		if err != nil {
			// Failure: close both files best-effort, discard partial output.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place update: move the temp file over the original.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return AddWatermarksMap(f1, f2, m, conf)
}
// AddWatermarks adds watermarks to all pages selected in rs and writes the result to w.
// selectedPages uses pdfcpu's page-selection expression syntax.
func AddWatermarks(rs io.ReadSeeker, w io.Writer, selectedPages []string, wm *pdfcpu.Watermark, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.ADDWATERMARKS
	if wm == nil {
		return errors.New("pdfcpu: missing watermark configuration")
	}
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}
	from := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err = ctx.AddWatermarks(pages, wm); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	// Re-validate the modified context unless validation is disabled.
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	durStamp := time.Since(from).Seconds()
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	// The stamping duration is folded into the reported write duration.
	durWrite := durStamp + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "watermark, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// AddWatermarksFile adds watermarks to all selected pages of inFile and writes the result to outFile.
// If outFile is empty or equals inFile, output goes to a ".tmp" sibling which
// then replaces inFile via rename on success.
func AddWatermarksFile(inFile, outFile string, selectedPages []string, wm *pdfcpu.Watermark, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		// Distinct destination: write directly to outFile.
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// Named result err lets the defer propagate Close/Rename failures.
	defer func() {
		if err != nil {
			// Failure: close both files best-effort, discard partial output.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place update: move the temp file over the original.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return AddWatermarks(f1, f2, selectedPages, wm, conf)
}
// RemoveWatermarks removes watermarks from all pages selected in rs and writes the result to w.
// selectedPages uses pdfcpu's page-selection expression syntax.
func RemoveWatermarks(rs io.ReadSeeker, w io.Writer, selectedPages []string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.REMOVEWATERMARKS
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}
	from := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, true)
	if err != nil {
		return err
	}
	if err = ctx.RemoveWatermarks(pages); err != nil {
		return err
	}
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	// Re-validate the modified context unless validation is disabled.
	if conf.ValidationMode != pdfcpu.ValidationNone {
		if err = ValidateContext(ctx); err != nil {
			return err
		}
	}
	durStamp := time.Since(from).Seconds()
	fromWrite := time.Now()
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	// The unstamping duration is folded into the reported write duration.
	durWrite := durStamp + time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "watermark, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// RemoveWatermarksFile removes watermarks from all selected pages of inFile and writes the result to outFile.
// If outFile is empty or equals inFile, output goes to a ".tmp" sibling which
// then replaces inFile via rename on success.
func RemoveWatermarksFile(inFile, outFile string, selectedPages []string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		// Distinct destination: write directly to outFile.
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// Named result err lets the defer propagate Close/Rename failures.
	defer func() {
		if err != nil {
			// Failure: close both files best-effort, discard partial output.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place update: move the temp file over the original.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return RemoveWatermarks(f1, f2, selectedPages, conf)
}
// HasWatermarks reports whether the PDF stream read from rs carries watermarks.
func HasWatermarks(rs io.ReadSeeker, conf *pdfcpu.Configuration) (bool, error) {
	ctx, err := ReadContext(rs, conf)
	if err == nil {
		err = ctx.DetectWatermarks()
	}
	if err != nil {
		return false, err
	}
	return ctx.Watermarked, nil
}
// HasWatermarksFile reports whether inFile carries watermarks.
func HasWatermarksFile(inFile string, conf *pdfcpu.Configuration) (bool, error) {
	in, err := os.Open(inFile)
	if err != nil {
		return false, err
	}
	defer in.Close()

	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	return HasWatermarks(in, conf)
}
// TextWatermark builds a text watermark configuration with the given
// update semantics (update == true replaces an existing stamp).
func TextWatermark(text, desc string, onTop, update bool, u pdfcpu.DisplayUnit) (*pdfcpu.Watermark, error) {
	w, err := pdfcpu.ParseTextWatermarkDetails(text, desc, onTop, u)
	if err != nil {
		return nil, err
	}
	w.Update = update
	return w, nil
}
// ImageWatermark builds an image watermark configuration with the given
// update semantics (update == true replaces an existing stamp).
func ImageWatermark(fileName, desc string, onTop, update bool, u pdfcpu.DisplayUnit) (*pdfcpu.Watermark, error) {
	w, err := pdfcpu.ParseImageWatermarkDetails(fileName, desc, onTop, u)
	if err != nil {
		return nil, err
	}
	w.Update = update
	return w, nil
}
// PDFWatermark builds a PDF-page watermark configuration with the given
// update semantics (update == true replaces an existing stamp).
func PDFWatermark(fileName, desc string, onTop, update bool, u pdfcpu.DisplayUnit) (*pdfcpu.Watermark, error) {
	w, err := pdfcpu.ParsePDFWatermarkDetails(fileName, desc, onTop, u)
	if err != nil {
		return nil, err
	}
	w.Update = update
	return w, nil
}
// AddTextWatermarksFile adds text stamps/watermarks to all selected pages of inFile and writes the result to outFile.
func AddTextWatermarksFile(inFile, outFile string, selectedPages []string, onTop bool, text, desc string, conf *pdfcpu.Configuration) error {
	// Fall back to points when no configuration supplies a display unit.
	u := pdfcpu.POINTS
	if conf != nil {
		u = conf.Unit
	}
	wm, err := TextWatermark(text, desc, onTop, false, u)
	if err != nil {
		return err
	}
	return AddWatermarksFile(inFile, outFile, selectedPages, wm, conf)
}
// AddImageWatermarksFile adds image stamps/watermarks to all selected pages of inFile and writes the result to outFile.
func AddImageWatermarksFile(inFile, outFile string, selectedPages []string, onTop bool, fileName, desc string, conf *pdfcpu.Configuration) error {
	// Fall back to points when no configuration supplies a display unit.
	u := pdfcpu.POINTS
	if conf != nil {
		u = conf.Unit
	}
	wm, err := ImageWatermark(fileName, desc, onTop, false, u)
	if err != nil {
		return err
	}
	return AddWatermarksFile(inFile, outFile, selectedPages, wm, conf)
}
// AddPDFWatermarksFile adds PDF stamps/watermarks to all selected pages of inFile and writes the result to outFile.
func AddPDFWatermarksFile(inFile, outFile string, selectedPages []string, onTop bool, fileName, desc string, conf *pdfcpu.Configuration) error {
	// Fall back to points when no configuration supplies a display unit.
	u := pdfcpu.POINTS
	if conf != nil {
		u = conf.Unit
	}
	wm, err := PDFWatermark(fileName, desc, onTop, false, u)
	if err != nil {
		return err
	}
	return AddWatermarksFile(inFile, outFile, selectedPages, wm, conf)
}
// UpdateTextWatermarksFile replaces existing text stamps/watermarks on all
// selected pages of inFile and writes the result to outFile.
func UpdateTextWatermarksFile(inFile, outFile string, selectedPages []string, onTop bool, text, desc string, conf *pdfcpu.Configuration) error {
	// Fall back to points when no configuration supplies a display unit.
	u := pdfcpu.POINTS
	if conf != nil {
		u = conf.Unit
	}
	wm, err := TextWatermark(text, desc, onTop, true, u)
	if err != nil {
		return err
	}
	return AddWatermarksFile(inFile, outFile, selectedPages, wm, conf)
}
// UpdateImageWatermarksFile replaces existing image stamps/watermarks on all
// selected pages of inFile and writes the result to outFile.
func UpdateImageWatermarksFile(inFile, outFile string, selectedPages []string, onTop bool, fileName, desc string, conf *pdfcpu.Configuration) error {
	// Fall back to points when no configuration supplies a display unit.
	u := pdfcpu.POINTS
	if conf != nil {
		u = conf.Unit
	}
	wm, err := ImageWatermark(fileName, desc, onTop, true, u)
	if err != nil {
		return err
	}
	return AddWatermarksFile(inFile, outFile, selectedPages, wm, conf)
}
// UpdatePDFWatermarksFile replaces existing PDF stamps/watermarks on all
// selected pages of inFile and writes the result to outFile.
func UpdatePDFWatermarksFile(inFile, outFile string, selectedPages []string, onTop bool, fileName, desc string, conf *pdfcpu.Configuration) error {
	// Fall back to points when no configuration supplies a display unit.
	u := pdfcpu.POINTS
	if conf != nil {
		u = conf.Unit
	}
	wm, err := PDFWatermark(fileName, desc, onTop, true, u)
	if err != nil {
		return err
	}
	return AddWatermarksFile(inFile, outFile, selectedPages, wm, conf)
}

109
vendor/github.com/pdfcpu/pdfcpu/pkg/api/trim.go generated vendored Normal file
View File

@ -0,0 +1,109 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
)
// Trim generates a trimmed version of rs
// containing all selected pages and writes the result to w.
// selectedPages uses pdfcpu's page-selection expression syntax.
func Trim(rs io.ReadSeeker, w io.Writer, selectedPages []string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.TRIM
	fromStart := time.Now()
	ctx, durRead, durVal, durOpt, err := readValidateAndOptimize(rs, conf, fromStart)
	if err != nil {
		return err
	}
	if err := ctx.EnsurePageCount(); err != nil {
		return err
	}
	fromWrite := time.Now()
	pages, err := PagesForPageSelection(ctx.PageCount, selectedPages, false)
	if err != nil {
		return err
	}
	// No special context processing required.
	// WriteContext decides which pages get written by checking conf.Cmd
	ctx.Write.SelectedPages = pages
	if err = WriteContext(ctx, w); err != nil {
		return err
	}
	durWrite := time.Since(fromWrite).Seconds()
	durTotal := time.Since(fromStart).Seconds()
	logOperationStats(ctx, "trim, write", durRead, durVal, durOpt, durWrite, durTotal)
	return nil
}
// TrimFile generates a trimmed version of inFile
// containing all selected pages and writes the result to outFile.
// If outFile is empty or equals inFile, output goes to a ".tmp" sibling which
// then replaces inFile via rename on success.
func TrimFile(inFile, outFile string, selectedPages []string, conf *pdfcpu.Configuration) (err error) {
	var f1, f2 *os.File
	if f1, err = os.Open(inFile); err != nil {
		return err
	}
	tmpFile := inFile + ".tmp"
	if outFile != "" && inFile != outFile {
		// Distinct destination: write directly to outFile.
		tmpFile = outFile
		log.CLI.Printf("writing %s...\n", outFile)
	} else {
		log.CLI.Printf("writing %s...\n", inFile)
	}
	if f2, err = os.Create(tmpFile); err != nil {
		return err
	}
	// Named result err lets the defer observe Trim's outcome and
	// propagate Close/Rename failures.
	defer func() {
		if err != nil {
			// Failure: close both files best-effort, discard partial output.
			f2.Close()
			f1.Close()
			os.Remove(tmpFile)
			return
		}
		if err = f2.Close(); err != nil {
			return
		}
		if err = f1.Close(); err != nil {
			return
		}
		// In-place update: move the temp file over the original.
		if outFile == "" || inFile == outFile {
			if err = os.Rename(tmpFile, inFile); err != nil {
				return
			}
		}
	}()
	return Trim(f1, f2, selectedPages, conf)
}

98
vendor/github.com/pdfcpu/pdfcpu/pkg/api/validate.go generated vendored Normal file
View File

@ -0,0 +1,98 @@
/*
Copyright 2020 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"io"
"os"
"time"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
"github.com/pkg/errors"
)
// Validate validates a PDF stream read from rs.
// Note that a validation failure does not return early: timing and xref
// stats are still logged before the (wrapped) error is returned.
func Validate(rs io.ReadSeeker, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	conf.Cmd = pdfcpu.VALIDATE
	// Calling Validate with validation disabled is a caller error.
	if conf.ValidationMode == pdfcpu.ValidationNone {
		return errors.New("pdfcpu: validate: mode ValidationNone not allowed")
	}
	from1 := time.Now()
	ctx, err := ReadContext(rs, conf)
	if err != nil {
		return err
	}
	dur1 := time.Since(from1).Seconds()
	from2 := time.Now()
	if err = ValidateContext(ctx); err != nil {
		// In strict mode, hint at the relaxed fallback.
		s := ""
		if conf.ValidationMode == pdfcpu.ValidationStrict {
			s = " (try -mode=relaxed)"
		}
		err = errors.Wrap(err, "validation error"+s)
	}
	dur2 := time.Since(from2).Seconds()
	dur := time.Since(from1).Seconds()
	log.Stats.Printf("XRefTable:\n%s\n", ctx)
	pdfcpu.ValidationTimingStats(dur1, dur2, dur)
	// at this stage: no binary breakup available!
	if ctx.Read.FileSize > 0 {
		ctx.Read.LogStats(ctx.Optimized)
	}
	return err
}
// ValidateFile validates inFile.
// Returns nil without touching the file when validation is disabled via conf.
func ValidateFile(inFile string, conf *pdfcpu.Configuration) error {
	if conf == nil {
		conf = pdfcpu.NewDefaultConfiguration()
	}
	// conf is guaranteed non-nil here, so the former `conf != nil &&`
	// guard was dead code and has been removed.
	if conf.ValidationMode == pdfcpu.ValidationNone {
		return nil
	}
	log.CLI.Printf("validating(mode=%s) %s ...\n", conf.ValidationModeString(), inFile)
	f, err := os.Open(inFile)
	if err != nil {
		return err
	}
	defer f.Close()
	if err = Validate(f, conf); err != nil {
		return err
	}
	log.CLI.Println("validation ok")
	return nil
}

View File

@ -0,0 +1,76 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filter
import (
"bytes"
"encoding/ascii85"
"io"
"io/ioutil"
"github.com/pkg/errors"
)
// ascii85Decode implements the PDF ASCII85Decode filter on top of the
// stdlib encoding/ascii85 codec.
type ascii85Decode struct {
	baseFilter
}

// eodASCII85 is the end-of-data marker terminating an ASCII85 stream.
const eodASCII85 = "~>"
// Encode implements encoding for an ASCII85Decode filter:
// ascii85-encode the payload and terminate it with the "~>" eod marker.
func (f ascii85Decode) Encode(r io.Reader) (io.Reader, error) {
	raw, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	var out bytes.Buffer
	enc := ascii85.NewEncoder(&out)
	enc.Write(raw)
	// Close flushes the final partial group into out.
	enc.Close()
	out.WriteString(eodASCII85)
	return &out, nil
}
// Decode implements decoding for an ASCII85Decode filter.
// A leading "<~" delimiter (emitted by some producers, btoa-style) is now
// tolerated and stripped; the trailing "~>" eod marker remains mandatory.
func (f ascii85Decode) Decode(r io.Reader) (io.Reader, error) {
	p, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	// Tolerate an optional "<~" start-of-data delimiter.
	p = bytes.TrimPrefix(p, []byte("<~"))
	if !bytes.HasSuffix(p, []byte(eodASCII85)) {
		return nil, errors.New("pdfcpu: Decode: missing eod marker")
	}
	// Strip eod sequence: "~>"
	p = p[:len(p)-2]
	decoder := ascii85.NewDecoder(bytes.NewReader(p))
	buf, err := ioutil.ReadAll(decoder)
	if err != nil {
		return nil, err
	}
	return bytes.NewBuffer(buf), nil
}

View File

@ -0,0 +1,82 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filter
import (
"bytes"
"encoding/hex"
"io"
"io/ioutil"
)
// asciiHexDecode implements the PDF ASCIIHexDecode filter.
type asciiHexDecode struct {
	baseFilter
}

// eodHexDecode is the end-of-data marker terminating an ASCIIHexDecode stream.
const eodHexDecode = '>'
// Encode implements encoding for an ASCIIHexDecode filter:
// hex-encode the payload and terminate it with the '>' eod marker.
func (f asciiHexDecode) Encode(r io.Reader) (io.Reader, error) {
	raw, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	n := hex.EncodedLen(len(raw))
	out := make([]byte, n, n+1)
	hex.Encode(out, raw)
	out = append(out, eodHexDecode)
	return bytes.NewBuffer(out), nil
}
// Decode implements decoding for an ASCIIHexDecode filter:
// gather hex digits up to the '>' eod marker, skipping PDF whitespace,
// pad an odd-length digit string with '0', then hex-decode.
func (f asciiHexDecode) Decode(r io.Reader) (io.Reader, error) {
	bb, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	var digits []byte
scan:
	for _, c := range bb {
		switch c {
		case eodHexDecode:
			break scan
		case 0x09, 0x0A, 0x0C, 0x0D, 0x20:
			// Skip whitespace.
		default:
			digits = append(digits, c)
		}
	}
	// An odd number of digits implies a trailing implicit '0'.
	if len(digits)%2 == 1 {
		digits = append(digits, '0')
	}
	dst := make([]byte, hex.DecodedLen(len(digits)))
	if _, err = hex.Decode(dst, digits); err != nil {
		return nil, err
	}
	return bytes.NewBuffer(dst), nil
}

View File

@ -0,0 +1,93 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filter
import (
"bytes"
"io"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pkg/errors"
"golang.org/x/image/ccitt"
)
// ccittDecode implements the PDF CCITTFaxDecode filter
// on top of golang.org/x/image/ccitt (decode only).
type ccittDecode struct {
	baseFilter
}
// Encode implements encoding for a CCITTDecode filter.
// CCITT encoding is not implemented; returning an explicit error instead
// of the previous (nil, nil) keeps callers from dereferencing a nil reader.
func (f ccittDecode) Encode(r io.Reader) (io.Reader, error) {
	return nil, errors.New("pdfcpu: filter CCITTFaxDecode: encoding not supported")
}
// Decode implements decoding for a CCITTDecode filter.
// Supported: Group 3 1-D (K == 0) and Group 4 (K < 0);
// K > 0 (mixed 1-D/2-D) is rejected.
func (f ccittDecode) Decode(r io.Reader) (io.Reader, error) {
	log.Trace.Println("DecodeCCITT begin")
	var ok bool
	// <0 : Pure two-dimensional encoding (Group 4)
	// =0 : Pure one-dimensional encoding (Group 3, 1-D)
	// >0 : Mixed one- and two-dimensional encoding (Group 3, 2-D)
	k := 0
	k, ok = f.parms["K"]
	if ok && k > 0 {
		return nil, errors.New("pdfcpu: filter CCITTFax k > 0 currently unsupported")
	}
	// 1728 is the default column count when the parameter is absent.
	cols := 1728
	col, ok := f.parms["Columns"]
	if ok {
		cols = col
	}
	// No default for Rows: the ccitt reader needs the image height up front.
	rows, ok := f.parms["Rows"]
	if !ok {
		return nil, errors.New("pdfcpu: ccitt: missing DecodeParam \"Rows\"")
	}
	// BlackIs1 == 1 inverts the pixel interpretation.
	blackIs1 := false
	v, ok := f.parms["BlackIs1"]
	if ok && v == 1 {
		blackIs1 = true
	}
	// EncodedByteAlign == 1 means each encoded row starts on a byte boundary.
	encodedByteAlign := false
	v, ok = f.parms["EncodedByteAlign"]
	if ok && v == 1 {
		encodedByteAlign = true
	}
	opts := &ccitt.Options{Invert: blackIs1, Align: encodedByteAlign}
	mode := ccitt.Group3
	if k < 0 {
		mode = ccitt.Group4
	}
	rd := ccitt.NewReader(r, ccitt.MSB, mode, cols, rows, opts)
	var b bytes.Buffer
	written, err := io.Copy(&b, rd)
	if err != nil {
		return nil, err
	}
	log.Trace.Printf("DecodeCCITT: decoded %d bytes.\n", written)
	return &b, nil
}

99
vendor/github.com/pdfcpu/pdfcpu/pkg/filter/filter.go generated vendored Normal file
View File

@ -0,0 +1,99 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package filter contains PDF filter implementations.
package filter
import (
"io"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pkg/errors"
)
// PDF defines the following filters. See also 7.4 in the PDF spec.
const (
	ASCII85   = "ASCII85Decode"
	ASCIIHex  = "ASCIIHexDecode"
	RunLength = "RunLengthDecode"
	LZW       = "LZWDecode"
	Flate     = "FlateDecode"
	CCITTFax  = "CCITTFaxDecode"
	JBIG2     = "JBIG2Decode"
	DCT       = "DCTDecode"
	JPX       = "JPXDecode"
)

// ErrUnsupportedFilter signals unsupported filter encountered.
var ErrUnsupportedFilter = errors.New("pdfcpu: filter not supported")

// Filter defines an interface for encoding/decoding PDF object streams.
// Encode transforms the cleartext read from r into its encoded form;
// Decode reverses that transformation.
type Filter interface {
	Encode(r io.Reader) (io.Reader, error)
	Decode(r io.Reader) (io.Reader, error)
}
// NewFilter returns a filter for given filterName and an optional parameter dictionary.
// Known-but-unimplemented filters (DCT, JBIG2, JPX) yield ErrUnsupportedFilter;
// anything else yields an invalid-filter error.
func NewFilter(filterName string, parms map[string]int) (Filter, error) {
	switch filterName {
	case ASCII85:
		return ascii85Decode{baseFilter{}}, nil
	case ASCIIHex:
		return asciiHexDecode{baseFilter{}}, nil
	case RunLength:
		return runLengthDecode{baseFilter{parms}}, nil
	case LZW:
		return lzwDecode{baseFilter{parms}}, nil
	case Flate:
		return flate{baseFilter{parms}}, nil
	case CCITTFax:
		return ccittDecode{baseFilter{parms}}, nil
	case DCT, JBIG2, JPX:
		// Recognized but not implemented.
		log.Info.Printf("Filter not supported: <%s>", filterName)
		return nil, ErrUnsupportedFilter
	default:
		return nil, errors.Errorf("Invalid filter: <%s>", filterName)
	}
}
// List returns the list of all supported PDF filters.
func List() []string {
	// Exclude CCITTFax, DCT, JBIG2 & JPX since they only make sense in the context of image processing.
	return []string{ASCII85, ASCIIHex, RunLength, LZW, Flate}
}

// baseFilter carries the optional decode-parameter dictionary shared by
// (and embedded in) every concrete filter implementation.
type baseFilter struct {
	parms map[string]int
}

View File

@ -0,0 +1,335 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filter
import (
"bytes"
"compress/zlib"
"io"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pkg/errors"
)
// Portions of this code are based on ideas of image/png: reader.go:readImagePass
// PNG is documented here: www.w3.org/TR/PNG-Filters.html
// PDF allows a prediction step prior to compression applying TIFF or PNG prediction.
// Predictor algorithm.
const (
	PredictorNo      = 1  // No prediction.
	PredictorTIFF    = 2  // Use TIFF prediction for all rows.
	PredictorNone    = 10 // Use PNGNone for all rows.
	PredictorSub     = 11 // Use PNGSub for all rows.
	PredictorUp      = 12 // Use PNGUp for all rows.
	PredictorAverage = 13 // Use PNGAverage for all rows.
	PredictorPaeth   = 14 // Use PNGPaeth for all rows.
	PredictorOptimum = 15 // Use the optimum PNG prediction for each row.
)

// For predictor > 2 PNG filters (see RFC 2083) get applied and the first byte of each pixelrow defines
// the prediction algorithm used for all pixels of this row.
const (
	PNGNone    = 0x00
	PNGSub     = 0x01
	PNGUp      = 0x02
	PNGAverage = 0x03
	PNGPaeth   = 0x04
)

// flate implements the FlateDecode filter (zlib/deflate) with optional
// TIFF/PNG predictor postprocessing on decode.
type flate struct {
	baseFilter
}
// Encode implements encoding for a Flate filter: zlib-compress the payload.
func (f flate) Encode(r io.Reader) (io.Reader, error) {
	log.Trace.Println("EncodeFlate begin")

	// TODO Optional decode parameters may need predictor preprocessing.

	var buf bytes.Buffer
	zw := zlib.NewWriter(&buf)
	// The deferred Close flushes the zlib trailer into buf after the return
	// values are set but before the caller resumes; since buf is returned by
	// pointer the caller observes the completed stream.
	defer zw.Close()

	n, err := io.Copy(zw, r)
	if err != nil {
		return nil, err
	}

	log.Trace.Printf("EncodeFlate end: %d bytes written\n", n)
	return &buf, nil
}
// Decode implements decoding for a Flate filter: zlib-decompress, then undo
// any configured predictor transformation.
func (f flate) Decode(r io.Reader) (io.Reader, error) {
	log.Trace.Println("DecodeFlate begin")

	zr, err := zlib.NewReader(r)
	if err != nil {
		return nil, err
	}
	defer zr.Close()

	// Optional decode parameters need postprocessing.
	return f.decodePostProcess(zr)
}
func passThru(rin io.Reader) (*bytes.Buffer, error) {
var b bytes.Buffer
_, err := io.Copy(&b, rin)
return &b, err
}
// intMemberOf reports whether i occurs in list.
func intMemberOf(i int, list []int) bool {
	for _, candidate := range list {
		if candidate == i {
			return true
		}
	}
	return false
}
// validateRowFilter checks that PNG row filter f is admissible under
// predictor p. Each prediction value implies (a) certain row filter(s).
// Fixed: the PredictorNone branch's error message previously said
// "PredictorOptimum" (copy-paste from the Optimum case).
func validateRowFilter(f, p int) error {
	switch p {
	case PredictorNone:
		// Deliberately lenient: any PNG row filter is accepted for
		// PredictorNone (the former strict f == PNGNone check was relaxed).
		if !intMemberOf(f, []int{PNGNone, PNGSub, PNGUp, PNGAverage, PNGPaeth}) {
			return errors.Errorf("pdfcpu: validateRowFilter: PredictorNone, unexpected row filter #%02x", f)
		}
	case PredictorSub:
		if f != PNGSub {
			return errors.Errorf("pdfcpu: validateRowFilter: expected row filter #%02x, got: #%02x", PNGSub, f)
		}
	case PredictorUp:
		if f != PNGUp {
			return errors.Errorf("pdfcpu: validateRowFilter: expected row filter #%02x, got: #%02x", PNGUp, f)
		}
	case PredictorAverage:
		if f != PNGAverage {
			return errors.Errorf("pdfcpu: validateRowFilter: expected row filter #%02x, got: #%02x", PNGAverage, f)
		}
	case PredictorPaeth:
		if f != PNGPaeth {
			return errors.Errorf("pdfcpu: validateRowFilter: expected row filter #%02x, got: #%02x", PNGPaeth, f)
		}
	case PredictorOptimum:
		// Optimum chooses per row, so every PNG row filter is legal.
		if !intMemberOf(f, []int{PNGNone, PNGSub, PNGUp, PNGAverage, PNGPaeth}) {
			return errors.Errorf("pdfcpu: validateRowFilter: PredictorOptimum, unexpected row filter #%02x", f)
		}
	default:
		return errors.Errorf("pdfcpu: validateRowFilter: unexpected predictor #%02x", p)
	}
	return nil
}
// applyHorDiff undoes TIFF predictor 2 in place: each sample has the
// corresponding component of the previous pixel added to it.
// This works for 8 bits per color only.
func applyHorDiff(row []byte, colors int) ([]byte, error) {
	pixels := len(row) / colors
	for px := 1; px < pixels; px++ {
		for c := 0; c < colors; c++ {
			row[px*colors+c] += row[(px-1)*colors+c]
		}
	}
	return row, nil
}
// processRow undoes the prediction step for one row in place.
// pr is the previous (already reconstructed) row, cr the current row.
// For PNG predictors cr[0] carries the per-row filter type and the rest is
// filtered pixel data; the reconstructed pixel bytes are returned.
func processRow(pr, cr []byte, p, colors, bytesPerPixel int) ([]byte, error) {
	//fmt.Printf("pr(%v) =\n%s\n", &pr, hex.Dump(pr))
	//fmt.Printf("cr(%v) =\n%s\n", &cr, hex.Dump(cr))
	// TIFF prediction has no row filter byte; the whole row is pixel data.
	if p == PredictorTIFF {
		return applyHorDiff(cr, colors)
	}
	// Apply the filter.
	cdat := cr[1:]
	pdat := pr[1:]
	// Get row filter from 1st byte
	f := int(cr[0])
	// The value of Predictor supplied by the decoding filter need not match the value
	// used when the data was encoded if they are both greater than or equal to 10.
	switch f {
	case PNGNone:
		// No operation.
	case PNGSub:
		// Raw(x) = Sub(x) + Raw(x-bpp): add the pixel to the left.
		for i := bytesPerPixel; i < len(cdat); i++ {
			cdat[i] += cdat[i-bytesPerPixel]
		}
	case PNGUp:
		// Raw(x) = Up(x) + Prior(x): add the pixel above.
		for i, p := range pdat {
			cdat[i] += p
		}
	case PNGAverage:
		// The average of the two neighboring pixels (left and above).
		// Raw(x) - floor((Raw(x-bpp)+Prior(x))/2)
		for i := 0; i < bytesPerPixel; i++ {
			cdat[i] += pdat[i] / 2
		}
		for i := bytesPerPixel; i < len(cdat); i++ {
			cdat[i] += uint8((int(cdat[i-bytesPerPixel]) + int(pdat[i])) / 2)
		}
	case PNGPaeth:
		// Paeth reconstruction is delegated (filterPaeth defined elsewhere).
		filterPaeth(cdat, pdat, bytesPerPixel)
	}
	return cdat, nil
}
// parameters reads the optional FlateDecode decode parameters,
// applying defaults for anything absent.
func (f flate) parameters() (colors, bpc, columns int, err error) {
	// Colors: number of interleaved colour components per sample.
	// Valid values are 1 to 4 (PDF 1.0) and 1 or greater (PDF 1.3).
	// Default value: 1. Used by PredictorTIFF only.
	colors = 1
	if c, ok := f.parms["Colors"]; ok {
		if c == 0 {
			return 0, 0, 0, errors.Errorf("pdfcpu: filter FlateDecode: \"Colors\" must be > 0")
		}
		colors = c
	}

	// BitsPerComponent: bits per colour component.
	// Valid values are 1, 2, 4, 8, and (PDF 1.5) 16; default 8.
	// Only 8 is supported by this implementation.
	bpc = 8
	if b, ok := f.parms["BitsPerComponent"]; ok {
		if !intMemberOf(b, []int{1, 2, 4, 8, 16}) {
			return 0, 0, 0, errors.Errorf("pdfcpu: filter FlateDecode: Unexpected \"BitsPerComponent\": %d", b)
		}
		if b != 8 {
			return 0, 0, 0, errors.New("pdfcpu: filter FlateDecode: \"BitsPerComponent\" must be 8")
		}
		bpc = b
	}

	// Columns: number of samples in each row. Default value: 1.
	columns = 1
	if col, ok := f.parms["Columns"]; ok {
		columns = col
	}

	return colors, bpc, columns, nil
}
// decodePostProcess reverses the optional predictor step applied to the
// stream data before flate compression.
//
// It reads the inflated stream r one row at a time, undoes TIFF or PNG
// prediction per row via processRow, and returns a reader over the
// reconstructed bytes. With no Predictor entry (or PredictorNo) r is
// passed through unchanged.
func (f flate) decodePostProcess(r io.Reader) (io.Reader, error) {
	predictor, found := f.parms["Predictor"]
	if !found || predictor == PredictorNo {
		// Nothing to undo.
		return passThru(r)
	}
	if !intMemberOf(
		predictor,
		[]int{PredictorTIFF,
			PredictorNone,
			PredictorSub,
			PredictorUp,
			PredictorAverage,
			PredictorPaeth,
			PredictorOptimum,
		}) {
		return nil, errors.Errorf("pdfcpu: filter FlateDecode: undefined \"Predictor\" %d", predictor)
	}
	colors, bpc, columns, err := f.parameters()
	if err != nil {
		return nil, err
	}
	// Bytes needed to hold one pixel, rounded up to whole bytes.
	bytesPerPixel := (bpc*colors + 7) / 8
	rowSize := bpc * colors * columns / 8
	if predictor != PredictorTIFF {
		// PNG prediction uses a row filter byte prefixing the pixelbytes of a row.
		rowSize++
	}
	// cr and pr are the bytes for the current and previous row.
	cr := make([]byte, rowSize)
	pr := make([]byte, rowSize)
	// Output buffer
	var b bytes.Buffer
	for {
		// Read decompressed bytes for one pixel row.
		// A partial row surfaces as io.ErrUnexpectedEOF and is returned below.
		n, err := io.ReadFull(r, cr)
		if err != nil {
			if err != io.EOF {
				return nil, err
			}
			// eof
			if n == 0 {
				break
			}
		}
		// Defensive: with io.ReadFull this should not trigger (err == nil
		// implies a full read; a clean EOF implies n == 0 and broke above).
		if n != rowSize {
			return nil, errors.Errorf("pdfcpu: filter FlateDecode: read error, expected %d bytes, got: %d", rowSize, n)
		}
		// Undo prediction for this row; d aliases cr (minus the filter byte
		// for PNG predictors).
		d, err1 := processRow(pr, cr, predictor, colors, bytesPerPixel)
		if err1 != nil {
			return nil, err1
		}
		_, err1 = b.Write(d)
		if err1 != nil {
			return nil, err1
		}
		if err == io.EOF {
			break
		}
		// Swap byte slices: the current row becomes the previous row for the
		// next iteration, and its old buffer is reused for the next read.
		pr, cr = cr, pr
	}
	// The reconstructed data must consist of whole rows (filter bytes excluded).
	if b.Len()%(bpc*colors*columns/8) > 0 {
		log.Info.Printf("failed postprocessing: %d %d\n", b.Len(), rowSize)
		return nil, errors.New("pdfcpu: filter FlateDecode: postprocessing failed")
	}
	return &b, nil
}

View File

@ -0,0 +1,82 @@
/*
Copyright 2018 The pdfcpu Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package filter
import (
"bytes"
"io"
"github.com/hhrutter/lzw"
"github.com/pdfcpu/pdfcpu/pkg/log"
"github.com/pkg/errors"
)
// lzwDecode implements the LZWDecode filter, reusing the shared
// baseFilter parameter handling.
type lzwDecode struct {
	baseFilter
}
// Encode implements encoding for an LZWDecode filter.
//
// EarlyChange (default 1) selects whether the code size increments one code
// early, mirroring the PDF LZWDecode parameter of the same name.
func (f lzwDecode) Encode(r io.Reader) (io.Reader, error) {
	log.Trace.Println("EncodeLZW begin")

	var b bytes.Buffer

	ec, ok := f.parms["EarlyChange"]
	if !ok {
		ec = 1
	}

	wc := lzw.NewWriter(&b, ec == 1)

	written, err := io.Copy(wc, r)
	if err != nil {
		wc.Close()
		return nil, err
	}

	// Close explicitly (not via defer) so the final LZW block is flushed
	// into b before we return, and a flush/write error is reported instead
	// of being silently dropped.
	if err := wc.Close(); err != nil {
		return nil, err
	}

	log.Trace.Printf("EncodeLZW end: %d bytes written\n", written)

	return &b, nil
}
// Decode implements decoding for an LZWDecode filter.
func (f lzwDecode) Decode(r io.Reader) (io.Reader, error) {
	log.Trace.Println("DecodeLZW begin")

	// Predictors are not supported for LZW streams here.
	if p, ok := f.parms["Predictor"]; ok && p > 1 {
		return nil, errors.Errorf("DecodeLZW: unsupported predictor %d", p)
	}

	earlyChange, ok := f.parms["EarlyChange"]
	if !ok {
		earlyChange = 1
	}

	rc := lzw.NewReader(r, earlyChange == 1)
	defer rc.Close()

	var buf bytes.Buffer
	n, err := io.Copy(&buf, rc)
	if err != nil {
		return nil, err
	}

	log.Trace.Printf("DecodeLZW: decoded %d bytes.\n", n)

	return &buf, nil
}

Some files were not shown because too many files have changed in this diff Show More