
Golang -- common file handling scenarios


Create an empty file

package main

import (
    "log"
    "os"
)

var (
    newFile *os.File
    err     error
)

func main() {
    newFile, err = os.Create("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    log.Println(newFile)
    newFile.Close()
}

Truncate a file

package main

import (
    "log"
    "os"
)

func main() {
    // Truncate the file to 100 bytes.
    // If the file is smaller than 100 bytes, its original contents are kept
    // and the remaining bytes are filled with null bytes.
    // If the file is larger than 100 bytes, everything past 100 bytes is discarded.
    // Either way we end up with exactly 100 bytes.
    // Passing 0 empties the file.
    err := os.Truncate("test.txt", 100)
    if err != nil {
        log.Fatal(err)
    }
}

Get file information

package main

import (
    "fmt"
    "log"
    "os"
)

var (
    fileInfo os.FileInfo
    err      error
)

func main() {
    // os.Stat returns an error if the file does not exist.
    fileInfo, err = os.Stat("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("File name:", fileInfo.Name())
    fmt.Println("Size in bytes:", fileInfo.Size())
    fmt.Println("Permissions:", fileInfo.Mode())
    fmt.Println("Last modified:", fileInfo.ModTime())
    fmt.Println("Is Directory: ", fileInfo.IsDir())
    fmt.Printf("System interface type: %T\n", fileInfo.Sys())
    fmt.Printf("System info: %+v\n\n", fileInfo.Sys())
}

Rename and move

package main

import (
    "log"
    "os"
)

func main() {
    originalPath := "test.txt"
    newPath := "test2.txt"
    err := os.Rename(originalPath, newPath)
    if err != nil {
        log.Fatal(err)
    }
}

Delete file

package main

import (
    "log"
    "os"
)

func main() {
    err := os.Remove("test.txt")
    if err != nil {
        log.Fatal(err)
    }
}

Open and close files

package main

import (
    "log"
    "os"
)

func main() {
    // Open the file read-only. Examples of reading and writing follow later.
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    file.Close()

    // OpenFile offers more options.
    // The last argument is the permission mode,
    // the second argument is the set of open flags.
    file, err = os.OpenFile("test.txt", os.O_APPEND, 0666)
    if err != nil {
        log.Fatal(err)
    }
    file.Close()

    // The flags below can be used on their own or combined.
    // Combine them with a bitwise OR as the second argument to OpenFile, for example:
    // os.O_CREATE|os.O_APPEND
    // or os.O_CREATE|os.O_TRUNC|os.O_WRONLY

    // os.O_RDONLY // read-only
    // os.O_WRONLY // write-only
    // os.O_RDWR   // read and write
    // os.O_APPEND // append to the file
    // os.O_CREATE // create the file if it does not exist
    // os.O_TRUNC  // truncate the file when it is opened
    // os.O_EXCL   // used with O_CREATE; the file must not already exist
    // os.O_SYNC   // open for synchronous I/O
}
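
As mentioned in the comments above, the flags can be combined. Below is a minimal sketch that appends a line to a file, creating it if it does not exist (the file name app.log is just an illustration, not from the original article):

package main

import (
    "log"
    "os"
)

func main() {
    // Open (or create) the file for appending only.
    file, err := os.OpenFile("app.log", os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0666)
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // Each write is appended to the end of the file.
    if _, err := file.Write([]byte("a new log line\n")); err != nil {
        log.Fatal(err)
    }
}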

Check if the file exists

package main

import (
    "log"
    "os"
)

func main() {
    // os.Stat returns an error if the file does not exist.
    fileInfo, err := os.Stat("test.txt")
    if err != nil {
        if os.IsNotExist(err) {
            log.Fatal("File does not exist.")
        }
        log.Fatal(err)
    }
    log.Println("File does exist. File information:")
    log.Println(fileInfo)
}

Check read and write permissions

package main

import (
    "log"
    "os"
)

func main() {
    // Test write permission. OpenFile returns an error if we do not have write access.
    // Note that it also returns an error if the file does not exist,
    // so inspect the error to find out which case it is.
    file, err := os.OpenFile("test.txt", os.O_WRONLY, 0666)
    if err != nil {
        if os.IsPermission(err) {
            log.Println("Error: Write permission denied.")
        }
    }
    file.Close()

    // Test read permission.
    file, err = os.OpenFile("test.txt", os.O_RDONLY, 0666)
    if err != nil {
        if os.IsPermission(err) {
            log.Println("Error: Read permission denied.")
        }
    }
    file.Close()
}

Change permissions, owner, and timestamps

package main

import (
    "log"
    "os"
    "time"
)

func main() {
    // Change file permissions, Linux style.
    err := os.Chmod("test.txt", 0777)
    if err != nil {
        log.Println(err)
    }

    // Change the file owner.
    err = os.Chown("test.txt", os.Getuid(), os.Getgid())
    if err != nil {
        log.Println(err)
    }

    // Change the timestamps.
    twoDaysFromNow := time.Now().Add(48 * time.Hour)
    lastAccessTime := twoDaysFromNow
    lastModifyTime := twoDaysFromNow
    err = os.Chtimes("test.txt", lastAccessTime, lastModifyTime)
    if err != nil {
        log.Println(err)
    }
}

Create hard links and soft links

An ordinary file is a pointer to a place on the disk, the inode. A hard link creates a new pointer to that same place. The file is only deleted from disk after all links to it have been removed. Hard links only work within the same file system. You can think of a hard link as the "normal" kind of link.

A symbolic link, also called a soft link, is a little different from a hard link: it does not point directly to a place on disk, but references another file by name. Symbolic links can point to files on different file systems. Not all operating systems support them.



package main

import (
    "os"
    "log"
    "fmt"
)

func main() {
    // Create a hard link.
    // Afterwards the same file contents are reachable under two file names;
    // changing the contents through one name is visible through the other.
    // Deleting or renaming one link does not affect the other.
    err := os.Link("original.txt", "original_also.txt")
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println("creating sym")
    // Create a symbolic link.
    err = os.Symlink("original.txt", "original_sym.txt")
    if err != nil {
        log.Fatal(err)
    }

    // Lstat returns file information, but when the file is a symlink
    // it describes the symlink itself, not the file it refers to.
    // Symlink does not work on Windows.
    fileInfo, err := os.Lstat("original_sym.txt")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Link info: %+v", fileInfo)

    // Changing the owner of the symlink does not affect the original file.
    err = os.Lchown("original_sym.txt", os.Getuid(), os.Getgid())
    if err != nil {
        log.Fatal(err)
    }
}

Copy file

package main

import (
    "os"
    "log"
    "io"
)

func main() {
    // Open the original file.
    originalFile, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer originalFile.Close()

    // Create the new file as the destination.
    newFile, err := os.Create("test_copy.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer newFile.Close()

    // Copy the bytes from the source to the destination file.
    bytesWritten, err := io.Copy(newFile, originalFile)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Copied %d bytes.", bytesWritten)

    // Flush the file contents to disk.
    err = newFile.Sync()
    if err != nil {
        log.Fatal(err)
    }
}

Jump to a specific position in a file (Seek)

package main

import (
    "os"
    "fmt"
    "log"
)

func main() {
    file, _ := os.Open("test.txt")
    defer file.Close()

    // The offset can be positive or negative.
    var offset int64 = 5

    // The reference point for the offset:
    // 0 = start of the file
    // 1 = current position
    // 2 = end of the file
    var whence int = 0
    newPosition, err := file.Seek(offset, whence)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Just moved to 5:", newPosition)

    // Move back two bytes from the current position.
    newPosition, err = file.Seek(-2, 1)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Just moved back two:", newPosition)

    // A handy trick to get the current position.
    currentPosition, err := file.Seek(0, 1)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Current position:", currentPosition)

    // Go back to the beginning of the file.
    newPosition, err = file.Seek(0, 0)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Position after seeking 0,0:", newPosition)
}
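
The whence values 0, 1, and 2 also have named constants in the io package (io.SeekStart, io.SeekCurrent, io.SeekEnd), which read more clearly. A minimal sketch, assuming test.txt is at least 5 bytes long:

package main

import (
    "fmt"
    "io"
    "log"
    "os"
)

func main() {
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // Seek to 5 bytes before the end of the file.
    pos, err := file.Seek(-5, io.SeekEnd)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Position:", pos)
}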

Write to a file

You can write to an open file using just the os package. Because a Go executable is statically linked, every package you import increases the size of the binary. Other packages such as io, `ioutil`, and `bufio` provide additional helpers, but they are not strictly necessary.

package main

import (
    "os"
    "log"
)

func main() {
    // Open the file for writing.
    file, err := os.OpenFile(
        "test.txt",
        os.O_WRONLY|os.O_TRUNC|os.O_CREATE,
        0666,
    )
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // Write bytes to the file.
    byteSlice := []byte("Bytes!\n")
    bytesWritten, err := file.Write(byteSlice)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Wrote %d bytes.\n", bytesWritten)
}
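
*os.File also has a WriteString convenience method for writing strings without an explicit []byte conversion. A minimal sketch:

package main

import (
    "log"
    "os"
)

func main() {
    file, err := os.OpenFile("test.txt", os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // WriteString writes the string directly to the file.
    n, err := file.WriteString("A string!\n")
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Wrote %d bytes.\n", n)
}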

Write a whole file quickly

The ioutil package has a very useful function, WriteFile(), that handles creating or opening the file, writing a byte slice, and closing the file. If you need a quick and concise way to dump a byte slice into a file, use it.

package main

import (
    "io/ioutil"
    "log"
)

func main() {
    err := ioutil.WriteFile("test.txt", []byte("Hi\n"), 0666)
    if err != nil {
        log.Fatal(err)
    }
}

Write using a buffer

The bufio package provides a buffered writer, so you can buffer data in memory before writing it to disk. This is useful when you are handling a lot of data, because it saves time on disk I/O. It also helps in other situations, for example when you write one byte at a time: the bytes accumulate in the in-memory buffer and are then written to disk in one go, reducing disk wear and improving performance.

package main

import (
    "log"
    "os"
    "bufio"
)

func main() {
    // Open the file write-only.
    file, err := os.OpenFile("test.txt", os.O_WRONLY, 0666)
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // Create a buffered writer for the file.
    bufferedWriter := bufio.NewWriter(file)

    // Write bytes to the buffer.
    bytesWritten, err := bufferedWriter.Write(
        []byte{65, 66, 67},
    )
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Bytes written: %d\n", bytesWritten)

    // Write a string to the buffer.
    // WriteRune() and WriteByte() are also available.
    bytesWritten, err = bufferedWriter.WriteString(
        "Buffered string\n",
    )
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Bytes written: %d\n", bytesWritten)

    // Check how many bytes are currently buffered.
    unflushedBufferSize := bufferedWriter.Buffered()
    log.Printf("Bytes buffered: %d\n", unflushedBufferSize)

    // How much buffer space is still available (unused).
    bytesAvailable := bufferedWriter.Available()
    log.Printf("Available buffer: %d\n", bytesAvailable)

    // Write the in-memory buffer to disk.
    bufferedWriter.Flush()

    // Reset discards any unflushed buffered data, clears any error,
    // and makes the writer send its output to the writer passed as argument.
    // Useful when you want to point the buffer at another writer.
    bufferedWriter.Reset(bufferedWriter)

    bytesAvailable = bufferedWriter.Available()
    log.Printf("Available buffer: %d\n", bytesAvailable)

    // Change the buffer size.
    // The first argument is the writer the buffer should flush to;
    // in this example we wrap the same writer.
    // If the requested size is smaller than the existing writer's buffer
    // (say 10), we do not get a 10-byte buffer; the original size is kept,
    // which defaults to 4096. NewWriterSize is therefore mainly useful for growing the buffer.
    bufferedWriter = bufio.NewWriterSize(
        bufferedWriter,
        8000,
    )

    // Check the buffer size after resizing.
    bytesAvailable = bufferedWriter.Available()
    log.Printf("Available buffer: %d\n", bytesAvailable)
}

Read at most N bytes

os.File provides the basic file-handling functions, while io, ioutil, and bufio provide additional helpers.

package main

import (
    "os"
    "log"
)

func main() {
    // Open the file read-only.
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // Read up to len(byteSlice) bytes from the file.
    // A return of 0 bytes means the end of the file was reached,
    // in which case Read returns an io.EOF error.
    byteSlice := make([]byte, 16)
    bytesRead, err := file.Read(byteSlice)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Number of bytes read: %d\n", bytesRead)
    log.Printf("Data read: %s\n", byteSlice)
}

Read exactly N Bytes

package main

import (
    "os"
    "log"
    "io"
)

func main() {
    // Open the file for reading.
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }

    // file.Read() can read a small file into a larger byte slice,
    // but io.ReadFull() returns an error if the file has fewer bytes
    // than the byte slice.
    byteSlice := make([]byte, 2)
    numBytesRead, err := io.ReadFull(file, byteSlice)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Number of bytes read: %d\n", numBytesRead)
    log.Printf("Data read: %s\n", byteSlice)
}

Read at least N Bytes

package main

import (
    "os"
    "log"
    "io"
)

func main() {
    // Open the file read-only.
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }

    byteSlice := make([]byte, 512)
    minBytes := 8
    // io.ReadAtLeast() returns an error if it cannot read at least
    // minBytes bytes, but whatever was read is kept in the slice.
    numBytesRead, err := io.ReadAtLeast(file, byteSlice, minBytes)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Number of bytes read: %d\n", numBytesRead)
    log.Printf("Data read: %s\n", byteSlice)
}

Read all bytes

package main

import (
    "os"
    "log"
    "fmt"
    "io/ioutil"
)

func main() {
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }

    // os.File.Read(), io.ReadFull() and io.ReadAtLeast()
    // all need a byte slice of a fixed size before reading.
    // ioutil.ReadAll() instead reads every byte from the reader
    // (in this case the file) and returns the bytes as a slice.
    data, err := ioutil.ReadAll(file)
    if err != nil {
        log.Fatal(err)
    }

    fmt.Printf("Data as hex: %x\n", data)
    fmt.Printf("Data as string: %s\n", data)
    fmt.Println("Number of bytes read:", len(data))
}

Read a whole file into memory

package main

import (
    "log"
    "io/ioutil"
)

func main() {
    // Read the file into a byte slice.
    data, err := ioutil.ReadFile("test.txt")
    if err != nil {
        log.Fatal(err)
    }

    log.Printf("Data read: %s\n", data)
}
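
Since Go 1.16 the same helper also lives in the os package as os.ReadFile (with os.WriteFile for writing), so the io/ioutil import can be dropped. A minimal sketch:

package main

import (
    "log"
    "os"
)

func main() {
    // os.ReadFile reads the whole file into a byte slice.
    data, err := os.ReadFile("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Data read: %s\n", data)
}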

Read using a buffer

Just as there are buffered writes, there are buffered reads. A buffered reader caches some of the content in memory and provides more functions than os.File or io.Reader alone. The default buffer size is 4096 bytes and the minimum is 16.

package main

import (
    "os"
    "log"
    "bufio"
    "fmt"
)

func main() {
    // Open the file and create a buffered reader.
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    bufferedReader := bufio.NewReader(file)

    // Peek at bytes without moving the read pointer.
    byteSlice := make([]byte, 5)
    byteSlice, err = bufferedReader.Peek(5)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Peeked at 5 bytes: %s\n", byteSlice)

    // Read bytes; this does advance the read pointer.
    numBytesRead, err := bufferedReader.Read(byteSlice)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Read %d bytes: %s\n", numBytesRead, byteSlice)

    // Read a single byte; returns an error if the read fails.
    myByte, err := bufferedReader.ReadByte()
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Read 1 byte: %c\n", myByte)

    // Read up to and including the delimiter, returning a byte slice.
    dataBytes, err := bufferedReader.ReadBytes('\n')
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Read bytes: %s\n", dataBytes)

    // Read up to and including the delimiter, returning a string.
    dataString, err := bufferedReader.ReadString('\n')
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Read string: %s\n", dataString)

    // This example reads several lines, so test.txt should contain
    // multiple lines of text to avoid errors.
}

Use a scanner

Scanner is a type in the bufio package, useful for stepping through delimited text in a file. Typically we split the contents into lines, using the newline character as the delimiter; in a CSV file, commas would be the delimiter. An os.File can be wrapped in a bufio.Scanner just like a buffered reader. We call the Scan() method to read up to the next delimiter, then use Text() or Bytes() to get the data that was read.

The delimiter does not have to be a simple byte or character. A dedicated function decides how to split the input, how far to advance the pointer, and what data to return. If no custom SplitFunc is provided, the default ScanLines splits on newline characters; other split functions in the bufio package include ScanRunes and ScanWords.

// To define your own split function, match this signature
type SplitFunc func(data []byte, atEOF bool) (advance int, token []byte, err error)

// Returning (0, nil, nil) will tell the scanner
// to scan again, but with a bigger buffer because
// it wasn't enough data to reach the delimiter
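
As a rough illustration of the signature above, here is a minimal custom SplitFunc that splits on commas (a hypothetical example, not from the original article):

package main

import (
    "bufio"
    "bytes"
    "fmt"
    "strings"
)

// scanCommas is a custom SplitFunc that returns comma-separated fields.
func scanCommas(data []byte, atEOF bool) (advance int, token []byte, err error) {
    if atEOF && len(data) == 0 {
        return 0, nil, nil
    }
    if i := bytes.IndexByte(data, ','); i >= 0 {
        // Found a comma: consume it and return the field before it.
        return i + 1, data[:i], nil
    }
    if atEOF {
        // No trailing comma: return the remaining data as the last field.
        return len(data), data, nil
    }
    // Ask the scanner for more data.
    return 0, nil, nil
}

func main() {
    scanner := bufio.NewScanner(strings.NewReader("a,b,c"))
    scanner.Split(scanCommas)
    for scanner.Scan() {
        fmt.Println(scanner.Text())
    }
}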

In the following example a bufio.Scanner is created for a file and used to read it word by word:

package main

import (
    "os"
    "log"
    "fmt"
    "bufio"
)

func main() {
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    scanner := bufio.NewScanner(file)

    // The default split function is bufio.ScanLines; here we use ScanWords.
    // You can also provide your own SplitFunc.
    scanner.Split(bufio.ScanWords)

    // Scan the next token.
    success := scanner.Scan()
    if success == false {
        // Scan returns false on an error or at EOF.
        err = scanner.Err()
        if err == nil {
            log.Println("Scan completed and reached EOF")
        } else {
            log.Fatal(err)
        }
    }

    // Get the data with Bytes() or Text().
    fmt.Println("First word found:", scanner.Text())

    // Call scanner.Scan() again to advance to the next token.
}
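
To read every word rather than just the first one, the usual pattern is to loop on Scan(). A minimal sketch:

package main

import (
    "bufio"
    "fmt"
    "log"
    "os"
)

func main() {
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    scanner := bufio.NewScanner(file)
    scanner.Split(bufio.ScanWords)

    // Scan() returns false at EOF or on an error.
    for scanner.Scan() {
        fmt.Println("Word:", scanner.Text())
    }
    if err := scanner.Err(); err != nil {
        log.Fatal(err)
    }
}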

Pack (zip) files

// This example uses zip, but the standard library
// also supports tar archives.
package main

import (
    "archive/zip"
    "log"
    "os"
)

func main() {
    // Create the archive file.
    outFile, err := os.Create("test.zip")
    if err != nil {
        log.Fatal(err)
    }
    defer outFile.Close()

    // Create a zip writer on top of it.
    zipWriter := zip.NewWriter(outFile)

    // Add files to the archive.
    // Here the contents are hard-coded; you could instead walk a folder
    // and write each file and its contents into the archive.
    var filesToArchive = []struct {
        Name, Body string
    }{
        {"test.txt", "String contents of file"},
        {"test2.txt", "\x61\x62\x63\n"},
    }

    // Write each entry into the archive in turn.
    for _, file := range filesToArchive {
        fileWriter, err := zipWriter.Create(file.Name)
        if err != nil {
            log.Fatal(err)
        }
        _, err = fileWriter.Write([]byte(file.Body))
        if err != nil {
            log.Fatal(err)
        }
    }

    // Clean up.
    err = zipWriter.Close()
    if err != nil {
        log.Fatal(err)
    }
}
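
The comments above suggest walking a folder instead of hard-coding the contents. A minimal sketch of that idea, assuming a directory named ./docs exists (the directory name is just an illustration):

package main

import (
    "archive/zip"
    "io"
    "log"
    "os"
    "path/filepath"
)

func main() {
    outFile, err := os.Create("docs.zip")
    if err != nil {
        log.Fatal(err)
    }
    defer outFile.Close()

    zipWriter := zip.NewWriter(outFile)
    defer zipWriter.Close()

    // Walk the directory and add every regular file to the archive.
    err = filepath.Walk("./docs", func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        if info.IsDir() {
            return nil
        }
        src, err := os.Open(path)
        if err != nil {
            return err
        }
        defer src.Close()

        // Use the file's path as the entry name inside the archive.
        dst, err := zipWriter.Create(path)
        if err != nil {
            return err
        }
        _, err = io.Copy(dst, src)
        return err
    })
    if err != nil {
        log.Fatal(err)
    }
}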

Extract (unzip) files

// This example uses zip, but the standard library
// also supports tar archives.
package main

import (
    "archive/zip"
    "log"
    "io"
    "os"
    "path/filepath"
)

func main() {
    zipReader, err := zip.OpenReader("test.zip")
    if err != nil {
        log.Fatal(err)
    }
    defer zipReader.Close()

    // Iterate over each file/folder inside the archive.
    for _, file := range zipReader.Reader.File {
        // A file inside the archive behaves like an ordinary file object.
        zippedFile, err := file.Open()
        if err != nil {
            log.Fatal(err)
        }
        defer zippedFile.Close()

        // Choose the name of the extracted file.
        // You could specify a full path or a prefix to place the files
        // in different folders; this example keeps the name used inside the archive.
        targetDir := "./"
        extractedFilePath := filepath.Join(
            targetDir,
            file.Name,
        )

        // Extract the item, or create the folder.
        if file.FileInfo().IsDir() {
            // Create the folder with the same permissions.
            log.Println("Creating directory:", extractedFilePath)
            os.MkdirAll(extractedFilePath, file.Mode())
        } else {
            // Extract a regular file.
            log.Println("Extracting file:", file.Name)

            outputFile, err := os.OpenFile(
                extractedFilePath,
                os.O_WRONLY|os.O_CREATE|os.O_TRUNC,
                file.Mode(),
            )
            if err != nil {
                log.Fatal(err)
            }
            defer outputFile.Close()

            // Simply copy the file contents with io.Copy.
            _, err = io.Copy(outputFile, zippedFile)
            if err != nil {
                log.Fatal(err)
            }
        }
    }
}

Compress a file

// This example uses the gzip format; the standard library also supports zlib, bzip2, flate, and lzw.
package main

import (
    "os"
    "compress/gzip"
    "log"
)

func main() {
    outputFile, err := os.Create("test.txt.gz")
    if err != nil {
        log.Fatal(err)
    }

    gzipWriter := gzip.NewWriter(outputFile)
    defer gzipWriter.Close()

    // When we write data to the gzip writer, it compresses the data
    // and writes it to the underlying file.
    // We do not have to care how the compression works; we just use it
    // like any other writer.
    _, err = gzipWriter.Write([]byte("Gophers rule!\n"))
    if err != nil {
        log.Fatal(err)
    }

    log.Println("Compressed data written to file.")
}

Decompress a file

// This example uses the gzip format; the standard library also supports zlib, bzip2, flate, and lzw.
package main

import (
    "compress/gzip"
    "log"
    "io"
    "os"
)

func main() {
    // Open a gzip file.
    // The file is a reader, but we could use any kind of data source,
    // for example gzipped content returned by a web server,
    // which is a stream in memory rather than a file.
    gzipFile, err := os.Open("test.txt.gz")
    if err != nil {
        log.Fatal(err)
    }

    gzipReader, err := gzip.NewReader(gzipFile)
    if err != nil {
        log.Fatal(err)
    }
    defer gzipReader.Close()

    // Decompress into a writer, in this case a file writer.
    outfileWriter, err := os.Create("unzipped.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer outfileWriter.Close()

    // Copy the contents.
    _, err = io.Copy(outfileWriter, gzipReader)
    if err != nil {
        log.Fatal(err)
    }
}

Temporary files and directories

The ioutil package provides two functions: TempDir() and TempFile(). It is the caller's responsibility to delete the temporary files and directories when done. One advantage is that when you pass an empty string as the directory name, the items are created in the operating system's temporary directory (/tmp on Linux). os.TempDir() returns the current operating system's temporary directory.

package main

import (
     "os"
     "io/ioutil"
     "log"
     "fmt"
)

func main() {
    // Create a temporary directory inside the system temp directory.
    tempDirPath, err := ioutil.TempDir("", "myTempDir")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Temp dir created:", tempDirPath)

    // Create a temporary file inside the temporary directory.
    tempFile, err := ioutil.TempFile(tempDirPath, "myTempFile.txt")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Temp file created:", tempFile.Name())

    // ... do something ...

    // Close the file.
    err = tempFile.Close()
    if err != nil {
        log.Fatal(err)
    }

    // Delete the resources we created.
    err = os.Remove(tempFile.Name())
    if err != nil {
        log.Fatal(err)
    }
    err = os.Remove(tempDirPath)
    if err != nil {
        log.Fatal(err)
    }
}

Download a file over HTTP

package main

import (
     "os"
     "io"
     "log"
     "net/http"
)

func main() {
    newFile, err := os.Create("devdungeon.html")
    if err != nil {
        log.Fatal(err)
    }
    defer newFile.Close()

    url := "http://www.devdungeon.com/archive"
    response, err := http.Get(url)
    if err != nil {
        log.Fatal(err)
    }
    defer response.Body.Close()

    // Write the contents of the HTTP response body to the file.
    // Body satisfies the reader interface, so we can use io.Copy.
    numBytesWritten, err := io.Copy(newFile, response.Body)
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("Downloaded %d byte file.\n", numBytesWritten)
}

Hash and digest

package main

import (
    "crypto/md5"
    "crypto/sha1"
    "crypto/sha256"
    "crypto/sha512"
    "log"
    "fmt"
    "io/ioutil"
)

func main() {
    // Get the contents of the file.
    data, err := ioutil.ReadFile("test.txt")
    if err != nil {
        log.Fatal(err)
    }

    // Compute the hashes.
    fmt.Printf("Md5: %x\n\n", md5.Sum(data))
    fmt.Printf("Sha1: %x\n\n", sha1.Sum(data))
    fmt.Printf("Sha256: %x\n\n", sha256.Sum256(data))
    fmt.Printf("Sha512: %x\n\n", sha512.Sum512(data))
}

The example above copies the entire file contents into memory and passes them to the hash function. Another approach is to create a hash writer and feed it data using Write, WriteString, or Copy. The following example uses the md5 hash, but you can use any other Writer.

package main

import (
    "crypto/md5"
    "log"
    "fmt"
    "io"
    "os"
)

func main() {
    file, err := os.Open("test.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    // Create a new hasher, which satisfies the writer interface.
    hasher := md5.New()
    _, err = io.Copy(hasher, file)
    if err != nil {
        log.Fatal(err)
    }

    // Compute the hash and print the result.
    // Pass nil as the argument because the data was fed in
    // through the writer interface rather than through this parameter.
    sum := hasher.Sum(nil)
    fmt.Printf("Md5 checksum: %x\n", sum)
}
