2018-12-11 16:06:34 +03:00
|
|
|
/*
|
|
|
|
* This file is subject to the terms and conditions defined in
|
|
|
|
* file 'LICENSE.md', which is part of this source code package.
|
|
|
|
*/
|
|
|
|
|
|
|
|
package model
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"os"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
|
2019-05-16 23:08:40 +03:00
|
|
|
"github.com/unidoc/unipdf/v3/common"
|
2019-05-16 23:44:51 +03:00
|
|
|
"github.com/unidoc/unipdf/v3/core"
|
2018-12-11 16:06:34 +03:00
|
|
|
)
|
|
|
|
|
2019-01-17 22:45:10 +00:00
|
|
|
// PdfAppender appends new PDF content to an existing PDF document via incremental updates.
type PdfAppender struct {
	// rs is the seekable byte source of the original document; its contents are
	// copied verbatim to the output before the new revision is appended.
	rs io.ReadSeeker
	// parser is the parser of the original document (shared with Reader).
	parser *core.PdfParser
	// roReader is a second, read-only reader over the same source. It serves as the
	// unmodified baseline when detecting which objects the caller has changed.
	roReader *PdfReader
	// Reader is the reader the appender was created from; the caller may mutate its objects.
	Reader *PdfReader
	// pages is the working page list for the new revision.
	pages []*PdfPage
	// acroForm is the AcroForm to write out in the new revision.
	acroForm *PdfAcroForm

	// xrefs is the xref table of the previous revision.
	xrefs core.XrefTable
	// xrefOffset is the byte offset of the previous revision's xref section.
	xrefOffset int64
	// greatestObjNum is the highest object number in use; new objects are numbered above it.
	greatestObjNum int

	// List of new objects and a map for quick lookups.
	newObjects   []core.PdfObject
	hasNewObject map[core.PdfObject]struct{}
	// replaceObjects maps an updated object to the object number it keeps from the
	// previous revision.
	replaceObjects map[core.PdfObject]int64

	// Used for skipping certain objects that are created (Pages etc).
	ignoreObjects map[core.PdfObject]struct{}

	// Map of objects traversed while resolving references. Set to that of the PdfReader on
	// creation (NewPdfAppender).
	traversed map[core.PdfObject]struct{}

	// prevRevisionSize is the byte size of the source document (the previous revision).
	prevRevisionSize int64
	// written guards against invoking Write more than once.
	written bool
}
|
|
|
|
|
|
|
|
func getPageResources(p *PdfPage) map[core.PdfObjectName]core.PdfObject {
|
|
|
|
resources := make(map[core.PdfObjectName]core.PdfObject)
|
|
|
|
if p.Resources == nil {
|
|
|
|
return resources
|
|
|
|
}
|
|
|
|
if p.Resources.Font != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.Font); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if p.Resources.ExtGState != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.ExtGState); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if p.Resources.XObject != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.XObject); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if p.Resources.Pattern != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.Pattern); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if p.Resources.Shading != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.Shading); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if p.Resources.ProcSet != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.ProcSet); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if p.Resources.Properties != nil {
|
|
|
|
if dict, found := core.GetDict(p.Resources.Properties); found {
|
|
|
|
for _, key := range dict.Keys() {
|
|
|
|
resources[key] = dict.Get(key)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return resources
|
|
|
|
}
|
|
|
|
|
|
|
|
// NewPdfAppender creates a new Pdf appender from a Pdf reader.
|
|
|
|
func NewPdfAppender(reader *PdfReader) (*PdfAppender, error) {
|
2019-01-17 23:55:27 +00:00
|
|
|
a := &PdfAppender{
|
2019-04-14 22:22:41 +00:00
|
|
|
rs: reader.rs,
|
|
|
|
Reader: reader,
|
|
|
|
parser: reader.parser,
|
|
|
|
traversed: reader.traversed,
|
2019-01-17 23:55:27 +00:00
|
|
|
}
|
2019-05-14 21:14:53 +00:00
|
|
|
if size, err := a.rs.Seek(0, io.SeekEnd); err != nil {
|
|
|
|
return nil, err
|
|
|
|
} else {
|
|
|
|
a.prevRevisionSize = size
|
|
|
|
}
|
|
|
|
|
2018-12-11 16:06:34 +03:00
|
|
|
if _, err := a.rs.Seek(0, io.SeekStart); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2019-05-14 21:14:53 +00:00
|
|
|
|
2018-12-11 16:06:34 +03:00
|
|
|
var err error
|
2019-01-17 23:55:27 +00:00
|
|
|
|
|
|
|
// Create a readonly (immutable) reader. It increases memory use but is necessary to be able
|
|
|
|
// to detect changes in the original reader objects.
|
|
|
|
//
|
|
|
|
// In the case where an existing page is modified, the page contents are replaced upon merging
|
|
|
|
// (appending). The new page will refer to objects from the read-only reader and new instances
|
|
|
|
// of objects that have been changes. Objects from the original reader are not appended, only
|
|
|
|
// new objects that modify the PDF. The change detection check is not resource demanding. It
|
|
|
|
// only checks owners (source) of indirect objects.
|
2018-12-11 16:06:34 +03:00
|
|
|
a.roReader, err = NewPdfReader(a.rs)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
for _, idx := range a.Reader.GetObjectNums() {
|
|
|
|
if a.greatestObjNum < idx {
|
|
|
|
a.greatestObjNum = idx
|
|
|
|
}
|
|
|
|
}
|
|
|
|
a.xrefs = a.parser.GetXrefTable()
|
2019-05-14 21:14:53 +00:00
|
|
|
a.xrefOffset = a.parser.GetXrefOffset()
|
|
|
|
|
2018-12-11 16:06:34 +03:00
|
|
|
a.hasNewObject = make(map[core.PdfObject]struct{})
|
|
|
|
for _, p := range a.roReader.PageList {
|
|
|
|
a.pages = append(a.pages, p)
|
|
|
|
}
|
2019-05-14 21:14:53 +00:00
|
|
|
a.replaceObjects = make(map[core.PdfObject]int64)
|
|
|
|
a.ignoreObjects = make(map[core.PdfObject]struct{})
|
2019-02-21 23:40:26 +02:00
|
|
|
|
2018-12-11 16:06:34 +03:00
|
|
|
a.acroForm = a.roReader.AcroForm
|
|
|
|
|
|
|
|
return a, nil
|
|
|
|
}
|
|
|
|
|
2019-05-14 21:14:53 +00:00
|
|
|
// updatesObjectsDeep recursively marks all objects under `obj` as updated appender (deep).
// Updated objects are appended to the new revision and keep their original object number.
//
// `processed` guards against cycles in the object graph; pass nil at the top level.
func (a *PdfAppender) updateObjectsDeep(obj core.PdfObject, processed map[core.PdfObject]struct{}) {
	if processed == nil {
		processed = map[core.PdfObject]struct{}{}
	}
	if _, ok := processed[obj]; ok || obj == nil {
		return
	}
	processed[obj] = struct{}{}

	// Resolve any unresolved references under obj before inspecting it; failure here
	// is logged but not fatal (best-effort resolution).
	err := core.ResolveReferencesDeep(obj, a.traversed)
	if err != nil {
		common.Log.Debug("ERROR: %v", err)
	}

	switch v := obj.(type) {
	case *core.PdfIndirectObject:
		// Change detection:
		// - obj same as in read only reader (internal use by appender) - definitely no change.
		// - obj is from original reader (derivative of original document) - mark for update if PdfObj WriteString has changed.
		// - obj is from another source (another file or new) - add as new object.
		switch {
		case v.GetParser() == a.roReader.parser:
			// obj same as in read only reader (internal use by appender) - definitely no change.
			return
		case v.GetParser() == a.Reader.parser:
			// obj is from original reader (derivative of original document) - mark for update if PdfObj WriteString has changed.
			origObj, _ := a.roReader.GetIndirectObjectByNumber(int(v.ObjectNumber))
			origInd, ok := origObj.(*core.PdfIndirectObject)
			if ok && origInd != nil {
				// Compare serialized forms: a differing WriteString means the caller
				// modified the object, so re-emit it under its original number.
				if origInd.PdfObject != v.PdfObject && origInd.PdfObject.WriteString() != v.PdfObject.WriteString() {
					a.addNewObject(obj)
					a.replaceObjects[obj] = v.ObjectNumber
				}
			}
		default:
			// obj is from another source (another file or new) - add as new object.
			a.addNewObject(obj)
		}
		a.updateObjectsDeep(v.PdfObject, processed)
	case *core.PdfObjectArray:
		for _, o := range v.Elements() {
			a.updateObjectsDeep(o, processed)
		}
	case *core.PdfObjectDictionary:
		for _, key := range v.Keys() {
			a.updateObjectsDeep(v.Get(key), processed)
		}
	case *core.PdfObjectStreams:
		// If the current parser is different from the read-only parser, then
		// the object has changed.
		if v.GetParser() != a.roReader.parser {
			for _, o := range v.Elements() {
				a.updateObjectsDeep(o, processed)
			}
		}
	case *core.PdfObjectStream:
		// If the current parser is different from the read-only parser, then
		// the object has changed.
		switch {
		case v.GetParser() == a.roReader.parser:
			// original roReader - no changes.
			return
		case v.GetParser() == a.Reader.parser:
			// same source document, potentially modified.
			// Check if data has changed.
			if streamObj, err := a.roReader.parser.LookupByReference(v.PdfObjectReference); err == nil {
				var isNotChanged bool
				// Unchanged only if both raw stream bytes and stream dictionary match.
				if stream, ok := core.GetStream(streamObj); ok && bytes.Equal(stream.Stream, v.Stream) {
					isNotChanged = true
				}
				if dict, ok := core.GetDict(streamObj); isNotChanged && ok {
					isNotChanged = dict.WriteString() == v.PdfObjectDictionary.WriteString()
				}
				if isNotChanged {
					return
				}
			}
			// Modified stream keeps its original object number in the new revision.
			if v.ObjectNumber != 0 {
				a.replaceObjects[obj] = v.ObjectNumber
			}
		default:
			// other source - add new.
			if _, has := a.hasNewObject[obj]; !has {
				a.addNewObject(obj)
			}
		}
		a.updateObjectsDeep(v.PdfObjectDictionary, processed)
	}
}
|
|
|
|
|
|
|
|
// addNewObject adds a new object to be written out in the new revision, either with as a new
|
|
|
|
// object or updating an older object (if replaceObjects entry set for obj).
|
|
|
|
func (a *PdfAppender) addNewObject(obj core.PdfObject) {
|
|
|
|
if _, has := a.hasNewObject[obj]; !has {
|
2018-12-11 16:06:34 +03:00
|
|
|
a.newObjects = append(a.newObjects, obj)
|
|
|
|
a.hasNewObject[obj] = struct{}{}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// mergeResources adds new named resources from src to dest. If the resources have the same name its will be renamed.
|
|
|
|
// The dest and src are resources dictionary. resourcesRenameMap is a rename map for resources.
|
|
|
|
func (a *PdfAppender) mergeResources(dest, src core.PdfObject, resourcesRenameMap map[core.PdfObjectName]core.PdfObjectName) core.PdfObject {
|
|
|
|
if src == nil && dest == nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
if src == nil {
|
|
|
|
return dest
|
|
|
|
}
|
|
|
|
|
|
|
|
srcDict, ok := core.GetDict(src)
|
|
|
|
if !ok {
|
|
|
|
return dest
|
|
|
|
}
|
|
|
|
if dest == nil {
|
|
|
|
dict := core.MakeDict()
|
|
|
|
dict.Merge(srcDict)
|
|
|
|
return src
|
|
|
|
}
|
|
|
|
|
|
|
|
destDict, ok := core.GetDict(dest)
|
|
|
|
if !ok {
|
|
|
|
common.Log.Error("Error resource is not a dictionary")
|
|
|
|
destDict = core.MakeDict()
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, key := range srcDict.Keys() {
|
|
|
|
if newKey, found := resourcesRenameMap[key]; found {
|
|
|
|
destDict.Set(newKey, srcDict.Get(key))
|
|
|
|
} else {
|
|
|
|
destDict.Set(key, srcDict.Get(key))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return destDict
|
|
|
|
}
|
|
|
|
|
|
|
|
// MergePageWith appends page content to source Pdf file page content.
|
|
|
|
func (a *PdfAppender) MergePageWith(pageNum int, page *PdfPage) error {
|
|
|
|
pageIndex := pageNum - 1
|
|
|
|
var srcPage *PdfPage
|
|
|
|
for i, p := range a.pages {
|
|
|
|
if i == pageIndex {
|
|
|
|
srcPage = p
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if srcPage == nil {
|
|
|
|
return fmt.Errorf("ERROR: Page dictionary %d not found in the source document", pageNum)
|
|
|
|
}
|
|
|
|
if srcPage.primitive != nil && srcPage.primitive.GetParser() == a.roReader.parser {
|
|
|
|
srcPage = srcPage.Duplicate()
|
|
|
|
a.pages[pageIndex] = srcPage
|
|
|
|
}
|
|
|
|
|
|
|
|
page = page.Duplicate()
|
|
|
|
procPage(page)
|
|
|
|
|
|
|
|
srcResources := getPageResources(srcPage)
|
|
|
|
pageResources := getPageResources(page)
|
|
|
|
resourcesRenameMap := make(map[core.PdfObjectName]core.PdfObjectName)
|
|
|
|
|
|
|
|
for key := range pageResources {
|
|
|
|
if _, found := srcResources[key]; found {
|
|
|
|
for i := 1; true; i++ {
|
|
|
|
newKey := core.PdfObjectName(string(key) + strconv.Itoa(i))
|
|
|
|
if _, exists := srcResources[newKey]; !exists {
|
|
|
|
resourcesRenameMap[key] = newKey
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
contentStreams, err := page.GetContentStreams()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
srcContentStreams, err := srcPage.GetContentStreams()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
for i, stream := range contentStreams {
|
|
|
|
for oldName, newName := range resourcesRenameMap {
|
2019-05-14 21:14:53 +00:00
|
|
|
// TODO: Not accurate, e.g. "/F1" could replace part of "/F12" etc.
|
2018-12-11 16:06:34 +03:00
|
|
|
stream = strings.Replace(stream, "/"+string(oldName), "/"+string(newName), -1)
|
|
|
|
}
|
|
|
|
contentStreams[i] = stream
|
|
|
|
}
|
|
|
|
|
|
|
|
srcContentStreams = append(srcContentStreams, contentStreams...)
|
|
|
|
if err := srcPage.SetContentStreams(srcContentStreams, core.NewFlateEncoder()); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2019-04-14 22:22:41 +00:00
|
|
|
for _, a := range page.annotations {
|
|
|
|
srcPage.annotations = append(srcPage.annotations, a)
|
2018-12-11 16:06:34 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
if srcPage.Resources == nil {
|
|
|
|
srcPage.Resources = NewPdfPageResources()
|
|
|
|
}
|
|
|
|
|
|
|
|
if page.Resources != nil {
|
|
|
|
srcPage.Resources.Font = a.mergeResources(srcPage.Resources.Font, page.Resources.Font, resourcesRenameMap)
|
|
|
|
srcPage.Resources.XObject = a.mergeResources(srcPage.Resources.XObject, page.Resources.XObject, resourcesRenameMap)
|
|
|
|
srcPage.Resources.Properties = a.mergeResources(srcPage.Resources.Properties, page.Resources.Properties, resourcesRenameMap)
|
|
|
|
if srcPage.Resources.ProcSet == nil {
|
|
|
|
srcPage.Resources.ProcSet = page.Resources.ProcSet
|
|
|
|
}
|
|
|
|
srcPage.Resources.Shading = a.mergeResources(srcPage.Resources.Shading, page.Resources.Shading, resourcesRenameMap)
|
|
|
|
srcPage.Resources.ExtGState = a.mergeResources(srcPage.Resources.ExtGState, page.Resources.ExtGState, resourcesRenameMap)
|
|
|
|
}
|
|
|
|
|
|
|
|
srcMediaBox, err := srcPage.GetMediaBox()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
pageMediaBox, err := page.GetMediaBox()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
var mediaBoxChanged bool
|
|
|
|
|
|
|
|
if srcMediaBox.Llx > pageMediaBox.Llx {
|
|
|
|
srcMediaBox.Llx = pageMediaBox.Llx
|
|
|
|
mediaBoxChanged = true
|
|
|
|
}
|
|
|
|
if srcMediaBox.Lly > pageMediaBox.Lly {
|
|
|
|
srcMediaBox.Lly = pageMediaBox.Lly
|
|
|
|
mediaBoxChanged = true
|
|
|
|
}
|
|
|
|
if srcMediaBox.Urx < pageMediaBox.Urx {
|
|
|
|
srcMediaBox.Urx = pageMediaBox.Urx
|
|
|
|
mediaBoxChanged = true
|
|
|
|
}
|
|
|
|
if srcMediaBox.Ury < pageMediaBox.Ury {
|
|
|
|
srcMediaBox.Ury = pageMediaBox.Ury
|
|
|
|
mediaBoxChanged = true
|
|
|
|
}
|
|
|
|
|
|
|
|
if mediaBoxChanged {
|
|
|
|
srcPage.MediaBox = srcMediaBox
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2019-01-17 23:55:27 +00:00
|
|
|
// AddPages adds pages to be appended to the end of the source PDF.
|
2018-12-11 16:06:34 +03:00
|
|
|
func (a *PdfAppender) AddPages(pages ...*PdfPage) {
|
|
|
|
for _, page := range pages {
|
|
|
|
page = page.Duplicate()
|
|
|
|
procPage(page)
|
|
|
|
a.pages = append(a.pages, page)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// RemovePage removes a page by number.
|
|
|
|
func (a *PdfAppender) RemovePage(pageNum int) {
|
|
|
|
pageIndex := pageNum - 1
|
2019-05-14 21:14:53 +00:00
|
|
|
|
|
|
|
// Remove from the pages list.
|
|
|
|
a.pages = append(a.pages[0:pageIndex], a.pages[pageNum:]...)
|
|
|
|
}
|
|
|
|
|
|
|
|
// replaceObject registers `replacement` as a replacement for `obj` in the appended revision.
|
|
|
|
// If an indirect object/stream it will maintain the same object number in the following
|
|
|
|
// revision.
|
|
|
|
func (a *PdfAppender) replaceObject(obj, replacement core.PdfObject) {
|
|
|
|
switch t := obj.(type) {
|
|
|
|
case *core.PdfIndirectObject:
|
|
|
|
a.replaceObjects[replacement] = t.ObjectNumber
|
|
|
|
case *core.PdfObjectStream:
|
|
|
|
a.replaceObjects[replacement] = t.ObjectNumber
|
2018-12-11 16:06:34 +03:00
|
|
|
}
|
2019-05-14 21:14:53 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// UpdateObject marks `obj` as updated and to be included in the following revision.
|
|
|
|
func (a *PdfAppender) UpdateObject(obj core.PdfObject) {
|
|
|
|
a.replaceObject(obj, obj)
|
|
|
|
if _, has := a.hasNewObject[obj]; !has {
|
|
|
|
a.newObjects = append(a.newObjects, obj)
|
|
|
|
a.hasNewObject[obj] = struct{}{}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// UpdatePage updates the `page` in the new revision if it has changed.
|
|
|
|
func (a *PdfAppender) UpdatePage(page *PdfPage) {
|
|
|
|
a.updateObjectsDeep(page.ToPdfObject(), nil)
|
2018-12-11 16:06:34 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// ReplacePage replaces the original page to a new page.
|
|
|
|
func (a *PdfAppender) ReplacePage(pageNum int, page *PdfPage) {
|
|
|
|
pageIndex := pageNum - 1
|
|
|
|
for i := range a.pages {
|
|
|
|
if i == pageIndex {
|
|
|
|
p := page.Duplicate()
|
|
|
|
procPage(p)
|
|
|
|
a.pages[i] = p
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-19 17:59:38 +02:00
|
|
|
// Sign signs a specific page with a digital signature.
// The signature field parameter must have a valid signature dictionary
// specified by its V field.
//
// The field is attached to page `pageNum` (1-based), appended to the AcroForm,
// and the page is marked for update in the appended revision. The actual
// signing happens later during Write.
func (a *PdfAppender) Sign(pageNum int, field *PdfFieldSignature) error {
	if field == nil {
		return errors.New("signature field cannot be nil")
	}

	signature := field.V
	if signature == nil {
		return errors.New("signature dictionary cannot be nil")
	}

	// Get a copy of the selected page.
	pageIndex := pageNum - 1
	if pageIndex < 0 || pageIndex > len(a.pages)-1 {
		return fmt.Errorf("page %d not found", pageNum)
	}
	page := a.Reader.PageList[pageIndex]

	// Add signature field annotations to the page annotations.
	field.P = page.ToPdfObject()
	if field.T == nil || field.T.String() == "" {
		// Default field name when the caller did not set one.
		field.T = core.MakeString(fmt.Sprintf("Signature %d", pageNum))
	}
	page.AddAnnotation(field.PdfAnnotationWidget.PdfAnnotation)

	// Add signature field to the form.
	// If the form is still the read-only baseline, switch to the caller's (mutable) form.
	if a.acroForm == a.roReader.AcroForm {
		a.acroForm = a.Reader.AcroForm
	}
	acroForm := a.acroForm
	if acroForm == nil {
		acroForm = NewPdfAcroForm()
	}
	// SigFlags 3 = SignaturesExist | AppendOnly.
	acroForm.SigFlags = core.MakeInteger(3)

	fields := append(acroForm.AllFields(), field.PdfField)
	acroForm.Fields = &fields
	a.ReplaceAcroForm(acroForm)

	// Replace original page.
	a.UpdatePage(page)
	a.pages[pageIndex] = page

	return nil
}
|
|
|
|
|
2019-01-17 23:55:27 +00:00
|
|
|
// ReplaceAcroForm replaces the acrobat form. It appends a new form to the Pdf which
|
|
|
|
// replaces the original AcroForm.
|
2018-12-11 16:06:34 +03:00
|
|
|
func (a *PdfAppender) ReplaceAcroForm(acroForm *PdfAcroForm) {
|
2019-05-14 21:14:53 +00:00
|
|
|
if acroForm != nil {
|
|
|
|
a.updateObjectsDeep(acroForm.ToPdfObject(), nil)
|
|
|
|
}
|
2018-12-11 16:06:34 +03:00
|
|
|
a.acroForm = acroForm
|
|
|
|
}
|
|
|
|
|
|
|
|
// Write writes the Appender output to io.Writer.
// It can only be called once and further invocations will result in an error.
//
// The original document bytes are copied through unchanged, then the new revision
// (updated/new objects plus xref) is appended. When the new objects include
// signature dictionaries, a mock write into a buffer is performed first so that
// the ByteRange and Contents values can be filled in at their final byte offsets.
func (a *PdfAppender) Write(w io.Writer) error {
	if a.written {
		return errors.New("appender write can only be invoked once")
	}

	writer := NewPdfWriter()

	pagesDict, ok := core.GetDict(writer.pages)
	if !ok {
		return errors.New("invalid Pages obj (not a dict)")
	}
	kids, ok := pagesDict.Get("Kids").(*core.PdfObjectArray)
	if !ok {
		return errors.New("invalid Pages Kids obj (not an array)")
	}
	pageCount, ok := pagesDict.Get("Count").(*core.PdfObjectInteger)
	if !ok {
		return errors.New("invalid Pages Count object (not an integer)")
	}

	parser := a.roReader.parser
	trailer := parser.GetTrailer()
	if trailer == nil {
		return errors.New("missing trailer")
	}
	// Catalog.
	catalogContainer, ok := core.GetIndirect(trailer.Get("Root"))
	if !ok {
		return errors.New("catalog container not found")
	}
	catalog, ok := core.GetDict(catalogContainer)
	if !ok {
		common.Log.Debug("ERROR: Missing catalog: (root %q) (trailer %s)", catalogContainer, *trailer)
		return errors.New("missing catalog")
	}

	// Add the keys which are not set.
	// Carry over catalog entries from the original document that the fresh writer lacks.
	for _, key := range catalog.Keys() {
		if writer.catalog.Get(key) == nil {
			obj := catalog.Get(key)
			writer.catalog.Set(key, obj)
		}
	}
	if a.acroForm != nil {
		writer.catalog.Set("AcroForm", a.acroForm.ToPdfObject())
		a.updateObjectsDeep(a.acroForm.ToPdfObject(), nil)
	}

	a.addNewObject(writer.infoObj)
	a.addNewObject(writer.root)

	// TODO: Represent the Pages as a model/object. PdfPages should represent the Pages dictionary.
	// Determine whether the page tree differs from the original document; if not,
	// the new Pages object is skipped (ignored) to avoid a needless update.
	pagesChanged := false
	if len(a.roReader.PageList) != len(a.pages) {
		pagesChanged = true
	} else {
		for i := range a.roReader.PageList {
			switch {
			case a.pages[i] == a.roReader.PageList[i]:
				// from ro reader - no change.
			case a.pages[i] == a.Reader.PageList[i]:
				// same as original file (possibly some modification of the page itself).
			default:
				// Different source.
				pagesChanged = true
			}
			if pagesChanged {
				break
			}
		}
	}
	if pagesChanged {
		a.updateObjectsDeep(writer.pages, nil)
	} else {
		a.ignoreObjects[writer.pages] = struct{}{}
	}
	// If pages unchanged, should not change the Pages.
	writer.pages.ObjectNumber = a.Reader.pagesContainer.ObjectNumber
	a.replaceObjects[writer.pages] = a.Reader.pagesContainer.ObjectNumber

	inheritedFields := []core.PdfObjectName{"Resources", "MediaBox", "CropBox", "Rotate"}
	for _, p := range a.pages {
		// Update the count.
		obj := p.ToPdfObject()

		*pageCount = *pageCount + 1
		// Check the object is not changing.
		// If the indirect object has the parser which equals to the readonly then the object is not changed.
		if ind, ok := obj.(*core.PdfIndirectObject); ok && ind.GetParser() == a.roReader.parser {
			kids.Append(&ind.PdfObjectReference)
			continue
		}
		if pDict, ok := core.GetDict(obj); ok {
			// Walk up the Parent chain, pulling down any inheritable attributes the
			// page itself does not define, since the page is re-parented below.
			parent, hasParent := pDict.Get("Parent").(*core.PdfIndirectObject)
			for hasParent {
				common.Log.Trace("Page Parent: %T", parent)
				parentDict, ok := parent.PdfObject.(*core.PdfObjectDictionary)
				if !ok {
					return errors.New("invalid Parent object")
				}
				for _, field := range inheritedFields {
					common.Log.Trace("Field %s", field)
					if pDict.Get(field) != nil {
						common.Log.Trace("- page has already")
						continue
					}

					if obj := parentDict.Get(field); obj != nil {
						// Parent has the field. Inherit, pass to the new page.
						common.Log.Trace("Inheriting field %s", field)
						pDict.Set(field, obj)
					}
				}
				parent, hasParent = parentDict.Get("Parent").(*core.PdfIndirectObject)
				common.Log.Trace("Next parent: %T", parentDict.Get("Parent"))
			}
			pDict.Set("Parent", writer.pages)
		}
		a.updateObjectsDeep(obj, nil)
		kids.Append(obj)
	}

	if _, err := a.rs.Seek(0, io.SeekStart); err != nil {
		return err
	}

	// Digital signature handling: Check if any of the new objects represent a signature dictionary.
	// The byte range is later updated dynamically based on the position of the actual signature
	// Contents.
	digestWriters := make(map[SignatureHandler]io.Writer)
	byteRange := core.MakeArray()
	for _, obj := range a.newObjects {
		if ind, found := core.GetIndirect(obj); found {
			if sigDict, found := ind.PdfObject.(*pdfSignDictionary); found {
				handler := *sigDict.handler
				var err error
				digestWriters[handler], err = handler.NewDigest(sigDict.signature)
				if err != nil {
					return err
				}
				// Placeholder values; replaced with real offsets after the mock write.
				byteRange.Append(core.MakeInteger(0xfffff), core.MakeInteger(0xfffff))
			}
		}
	}
	if byteRange.Len() > 0 {
		byteRange.Append(core.MakeInteger(0xfffff), core.MakeInteger(0xfffff))
	}
	for _, obj := range a.newObjects {
		if ind, found := core.GetIndirect(obj); found {
			if sigDict, found := ind.PdfObject.(*pdfSignDictionary); found {
				sigDict.Set("ByteRange", byteRange)
			}
		}
	}

	hasSigDict := len(digestWriters) > 0

	var reader io.Reader = a.rs
	if hasSigDict {
		// Tee the original bytes into every digest so the hash covers the whole document.
		writers := make([]io.Writer, 0, len(digestWriters))
		for _, hash := range digestWriters {
			writers = append(writers, hash)
		}
		reader = io.TeeReader(a.rs, io.MultiWriter(writers...))
	}

	// Write the original PDF.
	offset, err := io.Copy(w, reader)
	if err != nil {
		return err
	}

	if len(a.newObjects) == 0 {
		return nil
	}

	// Configure the writer for append (incremental update) mode.
	writer.writeOffset = offset
	writer.ObjNumOffset = a.greatestObjNum
	writer.appendMode = true
	writer.appendToXrefs = a.xrefs
	writer.appendXrefPrevOffset = a.xrefOffset
	writer.appendPrevRevisionSize = a.prevRevisionSize
	writer.minorVersion = a.roReader.PdfVersion().Minor
	writer.appendReplaceMap = a.replaceObjects

	// Preserve the xref style (table vs cross-reference stream) of the original.
	xrefType := a.parser.GetXrefType()
	if xrefType != nil {
		v := *xrefType == core.XrefTypeObjectStream
		writer.useCrossReferenceStream = &v
	}

	// Reset the objects in the writer.
	writer.objectsMap = map[core.PdfObject]struct{}{}
	writer.objects = []core.PdfObject{}

	for _, obj := range a.newObjects {
		if _, ignore := a.ignoreObjects[obj]; ignore {
			continue
		}
		writer.addObject(obj)
	}

	writerW := w
	if hasSigDict {
		// For signatures, we need to write twice. First to find the byte offset
		// of the Contents and then dynamically update the file with the
		// signature and ByteRange.
		writerW = bytes.NewBuffer(nil)
	}

	// Perform the write. For signatures will do a mock write to a buffer.
	if err := writer.Write(writerW); err != nil {
		return err
	}

	// TODO(gunnsth): Consider whether the dynamic content can be handled efficiently with generic write hooks?
	// Logic is getting pretty complex here.
	if hasSigDict {
		// Update the byteRanges based on mock write.
		bufferData := writerW.(*bytes.Buffer).Bytes()
		byteRange := core.MakeArray()
		var sigDicts []*pdfSignDictionary
		var lastPosition int64
		// Build the real ByteRange pairs: each pair covers the span between
		// consecutive Contents hex strings.
		for _, obj := range writer.objects {
			if ind, found := core.GetIndirect(obj); found {
				if sigDict, found := ind.PdfObject.(*pdfSignDictionary); found {
					sigDicts = append(sigDicts, sigDict)
					newPosition := sigDict.fileOffset + int64(sigDict.contentsOffsetStart)
					byteRange.Append(
						core.MakeInteger(lastPosition),
						core.MakeInteger(newPosition-lastPosition),
					)
					lastPosition = sigDict.fileOffset + int64(sigDict.contentsOffsetEnd)
				}
			}
		}
		byteRange.Append(
			core.MakeInteger(lastPosition),
			core.MakeInteger(offset+int64(len(bufferData))-lastPosition),
		)
		// set the ByteRange value
		byteRangeData := []byte(byteRange.WriteString())
		for _, sigDict := range sigDicts {
			bufferOffset := int(sigDict.fileOffset - offset)
			// Blank the reserved region, then overwrite with the actual ByteRange text.
			for i := sigDict.byteRangeOffsetStart; i < sigDict.byteRangeOffsetEnd; i++ {
				bufferData[bufferOffset+i] = ' '
			}
			dst := bufferData[bufferOffset+sigDict.byteRangeOffsetStart : bufferOffset+sigDict.byteRangeOffsetEnd]
			copy(dst, byteRangeData)
		}
		var prevOffset int
		// Feed the appended data (excluding each Contents placeholder) into the digests.
		for _, sigDict := range sigDicts {
			bufferOffset := int(sigDict.fileOffset - offset)
			data := bufferData[prevOffset : bufferOffset+sigDict.contentsOffsetStart]
			handler := *sigDict.handler
			digestWriters[handler].Write(data)
			prevOffset = bufferOffset + sigDict.contentsOffsetEnd
		}
		for _, sigDict := range sigDicts {
			data := bufferData[prevOffset:]
			handler := *sigDict.handler
			digestWriters[handler].Write(data)
		}
		// Compute each signature and patch its ByteRange/Contents into the buffer.
		for _, sigDict := range sigDicts {
			bufferOffset := int(sigDict.fileOffset - offset)
			handler := *sigDict.handler
			digest := digestWriters[handler]
			if err := handler.Sign(sigDict.signature, digest); err != nil {
				return err
			}
			sigDict.signature.ByteRange = byteRange
			contents := []byte(sigDict.signature.Contents.WriteString())

			// Empty out the ByteRange and Content data.
			// FIXME(gunnsth): Is this needed? Seems like the correct data is copied below? Prefer
			// to keep the rest space?
			for i := sigDict.byteRangeOffsetStart; i < sigDict.byteRangeOffsetEnd; i++ {
				bufferData[bufferOffset+i] = ' '
			}
			for i := sigDict.contentsOffsetStart; i < sigDict.contentsOffsetEnd; i++ {
				bufferData[bufferOffset+i] = ' '
			}

			// Copy the actual ByteRange and Contents data into the buffer prepared by first write.
			dst := bufferData[bufferOffset+sigDict.byteRangeOffsetStart : bufferOffset+sigDict.byteRangeOffsetEnd]
			copy(dst, byteRangeData)
			dst = bufferData[bufferOffset+sigDict.contentsOffsetStart : bufferOffset+sigDict.contentsOffsetEnd]
			copy(dst, contents)
		}

		// Flush the patched buffer (the real second write) to the output.
		buffer := bytes.NewBuffer(bufferData)
		_, err = io.Copy(w, buffer)
		if err != nil {
			return err
		}
	}

	a.written = true
	return nil
}
|
|
|
|
|
|
|
|
// WriteToFile writes the Appender output to file specified by path.
|
|
|
|
func (a *PdfAppender) WriteToFile(outputPath string) error {
|
|
|
|
fWrite, err := os.Create(outputPath)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
defer fWrite.Close()
|
|
|
|
return a.Write(fWrite)
|
|
|
|
}
|