1 File Read/Write
Go provides two approaches: simple one-shot functions (os.ReadFile/os.WriteFile) for small files, and streaming with os.Open/os.Create for large files.
Simple Read/Write (Entire File)
package main

import (
    "fmt"
    "log"
    "os"
)

func main() {
    // Write entire file at once
    content := []byte("Hello, Go!\nThis is a test file.\n")
    err := os.WriteFile("output.txt", content, 0644)
    if err != nil {
        log.Fatal(err)
    }

    // Read entire file at once
    data, err := os.ReadFile("output.txt")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(data))
}
Streaming Read/Write (Large Files)
package main

import (
    "fmt"
    "io"
    "log"
    "os"
)

func main() {
    // Write with os.Create
    file, err := os.Create("large_output.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    for i := 0; i < 1000; i++ {
        fmt.Fprintf(file, "Line %d: some data here\n", i+1)
    }

    // Read with os.Open (streaming)
    src, err := os.Open("large_output.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer src.Close()

    buf := make([]byte, 1024)
    for {
        n, err := src.Read(buf)
        if n > 0 {
            fmt.Print(string(buf[:n]))
        }
        if err == io.EOF {
            break
        }
        if err != nil {
            log.Fatal(err)
        }
    }
}

// Append to existing file
func appendToFile(filename, text string) error {
    f, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
    if err != nil {
        return err
    }
    defer f.Close()

    _, err = f.WriteString(text)
    return err
}
When to Use Which
• os.ReadFile/os.WriteFile → config files, small data, quick scripts
• os.Open/os.Create → large files, streaming, line-by-line processing (see the io.Copy sketch below)
• Always defer file.Close() when using os.Open/Create/OpenFile
• File permissions: 0644 (owner read/write, others read) is typical for regular files
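When the goal is simply to move bytes between two files, the streaming approach pairs naturally with io.Copy, which never loads the whole file into memory. A minimal sketch (the copyFile helper is illustrative, not part of the examples above):

// copyFile streams src into dst in chunks rather than reading the whole file.
func copyFile(dst, src string) error {
    in, err := os.Open(src)
    if err != nil {
        return err
    }
    defer in.Close()

    out, err := os.Create(dst)
    if err != nil {
        return err
    }
    defer out.Close()

    // io.Copy reads from in and writes to out in fixed-size chunks.
    if _, err := io.Copy(out, in); err != nil {
        return err
    }
    // Close explicitly (in addition to the defer) so write errors surface here.
    return out.Close()
}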
2 bufio Read/Write
The bufio package provides buffered I/O, which is more efficient for line-by-line reading and frequent small writes.
Line-by-Line Reading with Scanner
package main

import (
    "bufio"
    "fmt"
    "log"
    "os"
)

func main() {
    file, err := os.Open("data.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer file.Close()

    scanner := bufio.NewScanner(file)
    lineNum := 0
    for scanner.Scan() {
        lineNum++
        line := scanner.Text()
        fmt.Printf("%d: %s\n", lineNum, line)
    }
    if err := scanner.Err(); err != nil {
        log.Fatal(err)
    }
}

// For lines longer than 64KB, increase the buffer
func scanLargeLines(filename string) error {
    file, err := os.Open(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    scanner := bufio.NewScanner(file)
    scanner.Buffer(make([]byte, 0), 1024*1024) // max 1MB per line
    for scanner.Scan() {
        processLine(scanner.Text())
    }
    return scanner.Err()
}

func processLine(line string) {
    // process each line
}
Reading with bufio.NewReader
func readWithReader(filename string) error {
    file, err := os.Open(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    reader := bufio.NewReader(file)
    for {
        line, err := reader.ReadString('\n')
        if len(line) > 0 {
            fmt.Print(line) // includes the newline character
        }
        if err == io.EOF {
            break
        }
        if err != nil {
            return err // a real read error, not just end of file
        }
    }
    return nil
}
Buffered Writing
func writeBuffered(filename string, lines []string) error {
    file, err := os.Create(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    writer := bufio.NewWriter(file)
    for _, line := range lines {
        _, err := writer.WriteString(line + "\n")
        if err != nil {
            return err
        }
    }
    // Flush is critical: unflushed data is lost!
    return writer.Flush()
}
⚠️ Don't Forget Flush!
bufio.Writer batches writes in memory. You must call writer.Flush() to write the remaining buffered data to disk. Forgetting this is a common source of data loss.
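A small sketch of the safe pattern, assuming a hypothetical writeReport helper and an arbitrary 64KB buffer size: return the error from Flush instead of deferring it, so a failed flush is not silently dropped.

// writeReport buffers many small writes and flushes once at the end.
func writeReport(filename string, rows []string) error {
    f, err := os.Create(filename)
    if err != nil {
        return err
    }
    defer f.Close()

    w := bufio.NewWriterSize(f, 64*1024) // larger buffer than the 4KB default
    for _, row := range rows {
        if _, err := fmt.Fprintln(w, row); err != nil {
            return err
        }
    }
    // Returning Flush's error (instead of deferring Flush) surfaces write failures.
    return w.Flush()
}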
3 Directory Operations
Go provides os for directory creation and listing, and filepath for cross-platform path manipulation and recursive traversal.
Creating & Listing Directories
package main

import (
    "fmt"
    "log"
    "os"
)

func main() {
    // Create a single directory
    err := os.Mkdir("mydir", 0755)
    if err != nil && !os.IsExist(err) {
        log.Fatal(err)
    }

    // Create nested directories (like mkdir -p)
    err = os.MkdirAll("parent/child/grandchild", 0755)
    if err != nil {
        log.Fatal(err)
    }

    // List directory entries
    entries, err := os.ReadDir(".")
    if err != nil {
        log.Fatal(err)
    }
    for _, entry := range entries {
        if entry.IsDir() {
            fmt.Printf("[DIR]  %s/\n", entry.Name())
            continue
        }
        info, err := entry.Info()
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("[FILE] %s (%d bytes)\n", entry.Name(), info.Size())
    }
}
Recursive Traversal with filepath.WalkDir
package main

import (
    "fmt"
    "io/fs"
    "path/filepath"
    "strings"
)

func main() {
    root := "."
    // WalkDir is more efficient than Walk (doesn't call Stat on every file)
    err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        // Skip hidden directories (but not the root itself, whose name may be ".")
        if d.IsDir() && path != root && strings.HasPrefix(d.Name(), ".") {
            return filepath.SkipDir
        }
        // Print only .go files
        if !d.IsDir() && strings.HasSuffix(d.Name(), ".go") {
            fmt.Println(path)
        }
        return nil
    })
    if err != nil {
        fmt.Printf("Error walking directory: %v\n", err)
    }
}
Path Manipulation
import "path/filepath"
path := "/home/user/documents/report.pdf"
fmt.Println(filepath.Base(path)) // report.pdf
fmt.Println(filepath.Dir(path)) // /home/user/documents
fmt.Println(filepath.Ext(path)) // .pdf
fmt.Println(filepath.Join("a", "b")) // a/b (platform-aware separator)
// Get absolute path
abs, _ := filepath.Abs("relative/path")
fmt.Println(abs)
// Match patterns
matched, _ := filepath.Match("*.go", "main.go") // true
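Related to filepath.Match, filepath.Glob applies the same pattern syntax directly to the file system and returns the matching paths; a short sketch:

// Glob expands a Match-style pattern against files on disk
goFiles, err := filepath.Glob("*.go")
if err != nil {
    // Glob only fails on a malformed pattern (filepath.ErrBadPattern)
    fmt.Println("bad pattern:", err)
} else {
    for _, name := range goFiles {
        fmt.Println(name)
    }
}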
4 JSON File Processing
Combining file I/O with encoding/json for reading and writing structured data files.
Read/Write JSON Files
package main

import (
    "encoding/json"
    "fmt"
    "log"
    "os"
)

type Config struct {
    Host     string   `json:"host"`
    Port     int      `json:"port"`
    Debug    bool     `json:"debug"`
    AllowIPs []string `json:"allow_ips"`
}

// Write JSON to file
func saveConfig(filename string, cfg Config) error {
    file, err := os.Create(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    encoder := json.NewEncoder(file)
    encoder.SetIndent("", "  ")
    return encoder.Encode(cfg)
}

// Read JSON from file
func loadConfig(filename string) (Config, error) {
    var cfg Config
    file, err := os.Open(filename)
    if err != nil {
        return cfg, err
    }
    defer file.Close()

    return cfg, json.NewDecoder(file).Decode(&cfg)
}

func main() {
    cfg := Config{
        Host:     "localhost",
        Port:     8080,
        Debug:    true,
        AllowIPs: []string{"127.0.0.1", "192.168.1.0/24"},
    }
    if err := saveConfig("config.json", cfg); err != nil {
        log.Fatal(err)
    }
    fmt.Println("Config saved")

    loaded, err := loadConfig("config.json")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("Loaded: %+v\n", loaded)
}
Generated config.json
{
  "host": "localhost",
  "port": 8080,
  "debug": true,
  "allow_ips": [
    "127.0.0.1",
    "192.168.1.0/24"
  ]
}
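For small configs the streaming Encoder isn't strictly needed; json.MarshalIndent (mentioned in the summary) combined with os.WriteFile produces the same file in one shot. A minimal alternative sketch (the saveConfigIndented name is ours):

// saveConfigIndented marshals to a byte slice, then writes the file at once.
func saveConfigIndented(filename string, cfg Config) error {
    data, err := json.MarshalIndent(cfg, "", "  ")
    if err != nil {
        return err
    }
    return os.WriteFile(filename, data, 0644)
}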
Streaming Large JSON Arrays
// Process a JSON array without loading everything into memory
func processLargeJSON(filename string) error {
    file, err := os.Open(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    decoder := json.NewDecoder(file)

    // Read the opening bracket [
    _, err = decoder.Token()
    if err != nil {
        return err
    }

    // Decode each element one at a time
    for decoder.More() {
        var item map[string]interface{}
        if err := decoder.Decode(&item); err != nil {
            return err
        }
        fmt.Printf("Item: %v\n", item)
    }

    // Read the closing bracket ]
    _, err = decoder.Token()
    return err
}
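processLargeJSON assumes the file holds a single top-level array. A hypothetical items.json input might look like this (contents purely illustrative); decoder.More() keeps returning true until the closing ] is reached, so only one element is held in memory at a time:

[
  {"id": 1, "name": "first"},
  {"id": 2, "name": "second"}
]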
5 CSV Processing
Go's encoding/csv package handles CSV parsing and generation, including quoted fields and custom delimiters.
Reading CSV
package main

import (
    "encoding/csv"
    "fmt"
    "io"
    "log"
    "os"
    "strconv"
)

type Employee struct {
    Name       string
    Department string
    Salary     float64
}

func readCSV(filename string) ([]Employee, error) {
    file, err := os.Open(filename)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    reader := csv.NewReader(file)

    // Skip header row
    if _, err := reader.Read(); err != nil {
        return nil, err
    }

    var employees []Employee
    for {
        record, err := reader.Read()
        if err == io.EOF {
            break
        }
        if err != nil {
            return nil, err
        }
        // Parse error ignored for brevity; an invalid salary becomes 0
        salary, _ := strconv.ParseFloat(record[2], 64)
        employees = append(employees, Employee{
            Name:       record[0],
            Department: record[1],
            Salary:     salary,
        })
    }
    return employees, nil
}

func main() {
    employees, err := readCSV("employees.csv")
    if err != nil {
        log.Fatal(err)
    }
    for _, e := range employees {
        fmt.Printf("%s (%s): $%.2f\n", e.Name, e.Department, e.Salary)
    }
}
Writing CSV
func writeCSV(filename string, employees []Employee) error {
    file, err := os.Create(filename)
    if err != nil {
        return err
    }
    defer file.Close()

    writer := csv.NewWriter(file)
    defer writer.Flush()

    // Write header
    if err := writer.Write([]string{"Name", "Department", "Salary"}); err != nil {
        return err
    }

    // Write data rows
    for _, e := range employees {
        record := []string{
            e.Name,
            e.Department,
            strconv.FormatFloat(e.Salary, 'f', 2, 64),
        }
        if err := writer.Write(record); err != nil {
            return err
        }
    }

    // Flush explicitly before checking Error(); the deferred Flush above
    // runs too late for its failure to show up in the return value.
    writer.Flush()
    return writer.Error()
}

// Read all records at once (small files)
func readAllCSV(filename string) ([][]string, error) {
    file, err := os.Open(filename)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    return csv.NewReader(file).ReadAll()
}
CSV Tips
• csv.NewWriter also requires Flush(); don't forget it
• Set reader.Comma = '\t' for TSV files (see the sketch after this list)
• Set reader.LazyQuotes = true for less strict parsing
• Use reader.ReadAll() only for small files that fit in memory
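A minimal sketch of the tab-delimited case from the tips above (readTSV is a hypothetical helper, shown with ReadAll for brevity):

// readTSV parses a tab-separated file into raw records.
func readTSV(filename string) ([][]string, error) {
    file, err := os.Open(filename)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    reader := csv.NewReader(file)
    reader.Comma = '\t'      // treat tabs as the field delimiter
    reader.LazyQuotes = true // tolerate stray quotes inside fields
    return reader.ReadAll()  // fine for small files; use Read() in a loop otherwise
}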
6 Chapter Summary
• File Read/Write: os.ReadFile/WriteFile for small files, os.Open/Create for streaming
• bufio: Scanner for line-by-line, Reader/Writer with buffering
• Directories: Mkdir, ReadDir, WalkDir, filepath utilities
• JSON Files: Encoder/Decoder for file streaming, MarshalIndent
• CSV: csv.Reader/Writer, custom delimiters, batch ReadAll
• Best Practices: defer Close, Flush buffers, handle EOF properly