pre_backup.sh:
#!/bin/bash
# pre_backup.sh -- Bacula RunBeforeJob script.
# Creates an LVM snapshot of each SCST volume and mounts it read-only so
# Bacula can back up the NTFS filesystem inside.
#
# scst_lv1 -> Terminal Server (192.168.32.241)
# scst_lv2 -> Exchange Server (192.168.32.231)
# scst_lv3 -> File Server     (192.168.32.230)
# scst_lv4 -> Alure Server    (192.168.32.232)
set -u

# Each volume contains a DOS partition table; the NTFS filesystem starts at
# sector 63, so mount with an offset of 63*512 bytes.
readonly NTFS_OFFSET=$(( 63 * 512 ))
# COW space for each snapshot. Ideally this is as large as the origin volume;
# then the snapshot can never fill up during the backup.
readonly SNAP_SIZE=40G

status=0
for i in 1 2 3 4; do
  lv="scst_lv${i}"
  snap="${lv}-snap"
  mnt="/backupmnts/SCSTDISC${i}"

  # Clean up a stale snapshot left behind by a previous (failed) run,
  # otherwise lvcreate aborts with "Logical volume ... already exists".
  if [ -e "/dev/backup_vg/${snap}" ]; then
    sudo /bin/umount "$mnt" 2>/dev/null
    sudo /usr/sbin/lvremove -f "/dev/backup_vg/${snap}" || { status=1; continue; }
  fi

  sudo /usr/sbin/lvcreate -L "$SNAP_SIZE" -s -n "$snap" "/dev/backup_vg/${lv}" \
    || { status=1; continue; }

  # Mount the snapshot read-only via a loop device at the NTFS offset.
  # (LVM device-mapper names double the '-' inside the LV name.)
  sudo /bin/mount -t ntfs -o "loop,ro,offset=${NTFS_OFFSET}" \
    "/dev/mapper/backup_vg-${lv}--snap" "$mnt" || status=1
done

# Propagate failure so Bacula's RunBeforeJob aborts the job instead of
# silently backing up a missing or stale mount (original always exited 0).
exit "$status"
Director conf:
# Default Bacula Director Configuration file
#
# The only thing that MUST be changed is to add one or more
# file or directory names in the Include directive of the
# FileSet resource.
#
# For Bacula release 5.0.2 (28 April 2010) -- redhat
#
# You might also want to change the default email address
# from root to your address. See the "mail" and "operator"
# directives in the Messages resource.
#
Director {                                # define myself
  Name = backup.localdomain-dir
  DIRport = 9101                          # where we listen for UA connections
  QueryFile = "/usr/lib/bacula/query.sql"
  WorkingDirectory = "/var/lib/bacula"
  PidDirectory = "/var/run"
  Maximum Concurrent Jobs = 1
  # NOTE(review): plaintext password in config -- keep this file mode 0640,
  # owned by the bacula user, and consider rotating the password.
  Password = "orion123"                   # Console password
  Messages = Daemon
}
# JobDefs
@/etc/bacula/dir-conf.d/jobdefs.conf
# Jobs
@/etc/bacula/dir-conf.d/dailybackup.conf
# Backup the catalog database (after the nightly save)
@/etc/bacula/dir-conf.d/backupjob-catalog.conf
# Standard Restore template, to be changed by Console program
# Only one such job is needed for all Jobs/Clients/Storage ...
@/etc/bacula/dir-conf.d/restorejob.conf
# List of files to be backed up
@/etc/bacula/dir-conf.d/full-fileset.conf
# Schedule
@/etc/bacula/dir-conf.d/schedules.conf
# This schedule does the catalog.
@/etc/bacula/dir-conf.d/catalog-schedule.conf
# This is the backup of the catalog
@/etc/bacula/dir-conf.d/catalog-fileset.conf
# Client (File Services) to backup
@/etc/bacula/dir-conf.d/clients.conf
# Definition of file storage device
@/etc/bacula/dir-conf.d/storage.conf
# Generic catalog service
@/etc/bacula/dir-conf.d/catalog.conf
#
# Message delivery for daemon messages (no job).
Messages {
  Name = Daemon
  # The mailcommand value must be a single line; it was wrapped by the
  # mailing-list archive this file was recovered from.
  mailcommand = "/usr/sbin/bsmtp -h 192.168.32.253 -f \"\(Swartenco Bacula\) \<%r\>\" -s \"Swartenco Bacula daemon message\" %r"
  # NOTE(review): the address appeared anti-spam obfuscated ("AT ... DOT") in
  # the archive; restored to normal form -- confirm it is correct.
  mail = h.rademaker@aashq.nl = all, !skipped
  console = all, !skipped, !saved
  append = "/var/lib/bacula/log" = all, !skipped
}
# Default pool definition
@/etc/bacula/dir-conf.d/pools.conf
#
# Restricted console used by tray-monitor to get the status of the director
#
Console {
  Name = backup.localdomain-mon
  Password = "orion123"
  CommandACL = status, .status
}
[root@localhost ~]# vgdisplay
/dev/loop0: read failed after 0 of 2048 at 0: Input/output error
--- Volume group ---
VG Name backup_vg
System ID
Format lvm2
Metadata Areas 1
Metadata Sequence No 713
VG Access read/write
VG Status resizable
MAX LV 0
Cur LV 8
Open LV 8
Max PV 0
Cur PV 1
Act PV 1
VG Size 1.82 TB
PE Size 4.00 MB
Total PE 476931
Alloc PE / Size 296960 / 1.13 TB
Free PE / Size 179971 / 703.01 GB
VG UUID dwgB3T-Agrp-jvBt-JvG5-ZSic-bzH3-Gae3Cy
--- Volume group ---
VG Name VolGroup00
System ID
Format lvm2
Metadata Areas 1
Metadata Sequence No 3
VG Access read/write
VG Status resizable
MAX LV 0
Cur LV 2
Open LV 2
Max PV 0
Cur PV 1
Act PV 1
VG Size 230.59 GB
PE Size 32.00 MB
Total PE 7379
Alloc PE / Size 7379 / 230.59 GB
Free PE / Size 0 / 0
VG UUID nWmzqx-kl8X-XHW0-mjUQ-XYlX-UOoG-TjLY0B
[root@localhost ~]# lvdisplay
/dev/loop0: read failed after 0 of 2048 at 0: Input/output error
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv2
VG Name backup_vg
LV UUID 8JIraH-mKok-ggTk-hLAO-1zu4-h3Sl-D9Gty1
LV Write Access read/write
LV snapshot status source of
/dev/backup_vg/scst_lv2-snap [active]
LV Status available
# open 1
LV Size 100.00 GB
Current LE 25600
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:2
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv1
VG Name backup_vg
LV UUID URnzy4-mKDj-zU2D-17Wu-1WiI-aiVM-bQYWIx
LV Write Access read/write
LV snapshot status source of
/dev/backup_vg/scst_lv1-snap [INACTIVE]
LV Status available
# open 1
LV Size 600.00 GB
Current LE 153600
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:4
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv4
VG Name backup_vg
LV UUID U1bTdf-xZmj-Hr19-ZQhN-4S56-rTNV-j011GO
LV Write Access read/write
LV snapshot status source of
/dev/backup_vg/scst_lv4-snap [active]
LV Status available
# open 1
LV Size 100.00 GB
Current LE 25600
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:5
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv3
VG Name backup_vg
LV UUID 7ds36s-sDWP-18gY-NO5f-sGdB-RBv9-lmYzBb
LV Write Access read/write
LV snapshot status source of
/dev/backup_vg/scst_lv3-snap [INACTIVE]
LV Status available
# open 1
LV Size 200.00 GB
Current LE 51200
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:3
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv1-snap
VG Name backup_vg
LV UUID u7uVz3-iCEM-256T-cpVu-qvOg-0WWy-0Fr5Nk
LV Write Access read/write
LV snapshot status INACTIVE destination for /dev/backup_vg/scst_lv1
LV Status available
# open 2
LV Size 600.00 GB
Current LE 153600
COW-table size 40.00 GB
COW-table LE 10240
Snapshot chunk size 4.00 KB
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:6
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv2-snap
VG Name backup_vg
LV UUID uOCe8f-XWbu-QXiP-FoaQ-jBZc-jBmE-elVEaN
LV Write Access read/write
LV snapshot status active destination for /dev/backup_vg/scst_lv2
LV Status available
# open 2
LV Size 100.00 GB
Current LE 25600
COW-table size 40.00 GB
COW-table LE 10240
Allocated to snapshot 3.25%
Snapshot chunk size 4.00 KB
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:9
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv3-snap
VG Name backup_vg
LV UUID E0CF8h-PtXf-cO5X-givv-mvcA-9Uui-2L2Hv0
LV Write Access read/write
LV snapshot status INACTIVE destination for /dev/backup_vg/scst_lv3
LV Status available
# open 2
LV Size 200.00 GB
Current LE 51200
COW-table size 40.00 GB
COW-table LE 10240
Snapshot chunk size 4.00 KB
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:12
--- Logical volume ---
LV Name /dev/backup_vg/scst_lv4-snap
VG Name backup_vg
LV UUID l0UGU9-lYdk-isq8-yC3O-lizw-8Xn5-Ap52af
LV Write Access read/write
LV snapshot status active destination for /dev/backup_vg/scst_lv4
LV Status available
# open 2
LV Size 100.00 GB
Current LE 25600
COW-table size 40.00 GB
COW-table LE 10240
Allocated to snapshot 7.69%
Snapshot chunk size 4.00 KB
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:15
--- Logical volume ---
LV Name /dev/VolGroup00/LogVol00
VG Name VolGroup00
LV UUID wdC7e0-YxzK-YDxg-rKGR-gdUK-ljG1-R41QNq
LV Write Access read/write
LV Status available
# open 1
LV Size 225.44 GB
Current LE 7214
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:0
--- Logical volume ---
LV Name /dev/VolGroup00/LogVol01
VG Name VolGroup00
LV UUID rBC0ql-bwvi-v4FE-uYNM-BzC1-1p1D-dOwOTd
LV Write Access read/write
LV Status available
# open 1
LV Size 5.16 GB
Current LE 165
Segments 1
Allocation inherit
Read ahead sectors auto
- currently set to 256
Block device 253:1
From:
Konstantin Khomoutov <flatworm AT users.sourceforge DOT net>
To:
René Klomp <r.klomp AT aashq DOT nl>
Cc:
bacula-users AT lists.sourceforge DOT net
Date:
06-09-2011 13:05
Subject:
Re: [Bacula-users] Bacula backup problems
On Tue, 6 Sep 2011 12:18:45 +0200
René Klomp <r.klomp AT aashq DOT nl> wrote:
> I have a problem with a new backup server at one of our customers.
> The person who normally configures and manages the backup servers is on
> holiday at the moment. He set up this backup server before his
> holiday, but it is not working and my bacula/linux knowledge is not
> enough to solve this..
> I hope somebody here can help me in the right direction to get this
> backup server running correctly.
> This is backup/error log:
>
> 01-Sep 08:00 backup.localdomain-dir JobId 167: shell command: run
> BeforeJob "/etc/bacula/pre_backup.sh"
> 01-Sep 08:00 backup.localdomain-dir JobId 167:
> BeforeJob: /dev/loop0: read failed after 0 of 4096 at 0:
> Input/output error 01-Sep 08:00 backup.localdomain-dir JobId 167:
> BeforeJob: Logical volume "scst_lv1-snap" already exists in volume
> group "backup_vg" 01-Sep 08:00 backup.localdomain-dir JobId 167:
[...]
Judging from those "lv", "vg" and "snap" bits, it appears that Bacula
is configured to make backups using "LVM snapshots" [1].
Supposedly that /etc/bacula/pre_backup.sh script is responsible for
creation of a snapshot for one or more LVM logical volumes (they hold
filesystems) and mounting them somewhere for the Bacula to back up.
So without the in-depth details about how this is implemented, it's
impossible to tell anything further.
Bacula itself has nothing to do with this.
In theory, you could provide the director's config file, the contents
of that "pre_backup.sh" script and the output of running
# vgdisplay
and
# lvdisplay
so we could try to figure out how it's supposed to work and how to
fix the situation, but I'd rather call that person out from their
holidays.
1. http://en.wikipedia.org/wiki/Logical_Volume_Manager_%28Linux%29
------------------------------------------------------------------------------
Special Offer -- Download ArcSight Logger for FREE!
Finally, a world-class log management solution at an even better
price-free! And you'll get a free "Love Thy Logs" t-shirt when you
download Logger. Secure your free ArcSight Logger TODAY!
http://p.sf.net/sfu/arcsisghtdev2dev
_______________________________________________
Bacula-users mailing list
Bacula-users AT lists.sourceforge DOT net
https://lists.sourceforge.net/lists/listinfo/bacula-users
|